Mirror of https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir.git (synced 2025-08-11 21:23:54 -04:00)

Commit 2dc2974f0a — PFEH2025 - merge dev PFEA2024 it2-it3-it4 SSO available
Parent: 3027c0bc1f
126 changed files with 11627 additions and 1769 deletions
31 .github/workflows/create-branch-images.yml vendored
@@ -103,4 +103,35 @@ jobs:
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
  build-quizroom:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata for Quizroom Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository }}-quizroom
          tags: |
            type=ref,event=branch
            type=semver,pattern={{version}}
      - name: Build and push Quizroom Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./quizRoom
          push: ${{ github.event_name != 'pull_request' }}
          platforms: linux/amd64
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
33 .github/workflows/create-docs.yml vendored Normal file
@@ -0,0 +1,33 @@
name: Creates docs and deploy to gh-pages
on:
  workflow_call:
  workflow_dispatch:
  push:
    branches: [ main ]

jobs:
  build:
    name: Deploy docs
    runs-on: ubuntu-latest
    env:
      PUMLURL: "https://www.plantuml.com/plantuml/"
    steps:
      - name: Checkout main
        uses: actions/checkout@v2

      - name: Setup Python
        uses: actions/setup-python@v5

      - name: Install dependencies
        working-directory: ./documentation
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Build docs
        working-directory: ./documentation
        run: mkdocs build --verbose --clean

      - name: Push docs to gh-pages
        working-directory: ./documentation
        run: python deploy.py
32 .github/workflows/tests.yml vendored Normal file
@@ -0,0 +1,32 @@
name: Tests

on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main

jobs:
  tests:
    runs-on: ubuntu-latest

    steps:
      - name: Check Out Repo
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'

      - name: Install Dependencies and Run Tests
        run: |
          npm ci
          npm test
        working-directory: ${{ matrix.directory }}

    strategy:
      matrix:
        directory: [client, server]
16 .gitignore vendored
@@ -73,7 +73,7 @@ web_modules/
.yarn-integrity

# dotenv environment variable files
.env
server/.env
.env.development.local
.env.test.local
.env.production.local
@@ -128,3 +128,17 @@ dist
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
db-backup/

**/.env
.venv
deployments
/test/stressTest/output

# Opentofu state
opentofu/*/.terraform
opentofu/*/.terraform.lock*
opentofu/*/terraform.tfstate*
opentofu/*/terraform.tfvars
# Opentofu auth config
opentofu/auth_config.json
16 .vscode/launch.json vendored
@@ -20,6 +20,20 @@
            "name": "Debug frontend",
            "url": "http://localhost:5173",
            "webRoot": "${workspaceFolder}/client/"
        }
    },
    {
        "name": "Docker: Attach to Node",
        "type": "node",
        "request": "attach",
        "restart": true,
        "port": 9229,
        "address": "localhost",
        "localRoot": "${workspaceFolder}",
        "remoteRoot": "/app",
        "protocol": "inspector",
        "skipFiles": [
            "<node_internals>/**"
        ]
    }
]
}
10 ansible/Dockerfile Normal file
@@ -0,0 +1,10 @@
FROM python:3.9-slim

# Installer Ansible
RUN pip install ansible

# Définir le répertoire de travail
WORKDIR /ansible

# Copier les fichiers nécessaires
COPY inventory.ini deploy.yml ./
40 ansible/README.md Normal file
@@ -0,0 +1,40 @@
# Déploiement de Services avec Ansible et Docker Compose

Ce guide explique comment utiliser Ansible pour configurer et déployer des services Docker avec `docker-compose`.

## Prérequis

1. **Ansible** : Assurez-vous qu'Ansible est installé sur votre système.
   - [Guide d'installation d'Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html)

2. **Docker et Docker Compose** : Docker doit être installé et configuré pour fonctionner avec Ansible.
   - Installez Docker : [Documentation Docker](https://docs.docker.com/get-docker/)
   - Docker Compose est inclus comme plugin Docker dans les versions récentes de Docker.

3. **WSL (pour Windows)** : Si vous utilisez Windows, assurez-vous d'avoir configuré WSL et un environnement Ubuntu.

## Structure du projet

Le fichier `deploy.yml` contient les tâches Ansible nécessaires pour télécharger, configurer, et démarrer les services Docker en utilisant Docker Compose.

## Installation et déploiement

### Lancer le déploiement avec Ansible

Pour exécuter le playbook Ansible `deploy.yml`, utilisez la commande suivante depuis le répertoire racine du projet :

`ansible-playbook -i inventory.ini deploy.yml`

### Vérification du déploiement

Une fois le playbook exécuté, Ansible télécharge Docker et Docker Compose, télécharge le fichier `docker-compose.yaml`, démarre Docker et lance les conteneurs spécifiés.

### Configuration et contenu du Playbook (deploy.yml)
Le playbook `deploy.yml` exécute les étapes suivantes :

1. Télécharge Docker Compose si ce dernier n'est pas encore présent.
2. Vérifie l'installation de Docker Compose pour s'assurer qu'il est opérationnel.
3. Démarre le service Docker si ce n'est pas déjà le cas.
4. Télécharge le fichier docker-compose.yaml depuis le dépôt Git spécifié.
5. Lance Docker Compose pour déployer les conteneurs définis dans docker-compose.yaml.
6. Vérifie l'état des conteneurs et affiche les conteneurs en cours d'exécution.
38 ansible/deploy.yml Normal file
@@ -0,0 +1,38 @@
---
- name: Déployer des services avec Docker Compose
  hosts: local
  tasks:

    - name: Télécharger Docker
      ansible.builtin.package:
        name: docker-compose
        state: present

    - name: Vérifier l'installation de Docker Compose plugin
      ansible.builtin.command:
        cmd: docker compose version

    - name: Commencer le service docker
      ansible.builtin.service:
        name: docker
        state: started
        enabled: yes

    - name: Télécharger le fichier docker-compose
      ansible.builtin.get_url:
        url: https://raw.githubusercontent.com/ets-cfuhrman-pfe/EvalueTonSavoir/refs/heads/main/docker-compose.yaml
        dest: ./docker-compose.yaml

    - name: Lancer Docker Compose
      ansible.builtin.shell:
        docker-compose up -d
      become: true

    - name: Vérification des services Docker
      ansible.builtin.command:
        cmd: docker ps
      register: docker_ps_output

    - name: Afficher l'état des conteneurs Docker
      ansible.builtin.debug:
        msg: "{{ docker_ps_output.stdout }}"
70 ansible/docker-compose.yaml Normal file
@@ -0,0 +1,70 @@
services:

  frontend:
    image: fuhrmanator/evaluetonsavoir-frontend:latest
    container_name: frontend
    ports:
      - "5173:5173"
    environment:
      VITE_BACKEND_URL: "http://localhost:4400"
      # don't define VITE_BACKEND_SOCKET_URL so it will default to window.location.host
      # VITE_BACKEND_SOCKET_URL: ""
    restart: always

  backend:
    image: fuhrmanator/evaluetonsavoir-backend:latest
    container_name: backend
    ports:
      - "3000:3000"
    environment:
      PORT: 3000
      MONGO_URI: "mongodb://mongo:27017/evaluetonsavoir"
      MONGO_DATABASE: evaluetonsavoir
      EMAIL_SERVICE: gmail
      SENDER_EMAIL: infoevaluetonsavoir@gmail.com
      EMAIL_PSW: 'vvml wmfr dkzb vjzb'
      JWT_SECRET: haQdgd2jp09qb897GeBZyJetC8ECSpbFJe
      FRONTEND_URL: "http://localhost:5173"
    depends_on:
      - mongo
    restart: always

  # Ce conteneur sert de routeur pour assurer le bon fonctionnement de l'application
  nginx:
    image: fuhrmanator/evaluetonsavoir-routeur:latest
    container_name: nginx
    ports:
      - "80:80"
    depends_on:
      - backend
      - frontend
    restart: always

  # Ce conteneur est la base de données principale pour l'application
  mongo:
    image: mongo
    container_name: mongo
    ports:
      - "27017:27017"
    tty: true
    volumes:
      - mongodb_data:/data/db
    restart: always

  # Ce conteneur assure que l'application est à jour en allant chercher s'il y a des mises à jour
  watchtower:
    image: containrrr/watchtower
    container_name: watchtower
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - TZ=America/Montreal
      - WATCHTOWER_CLEANUP=true
      - WATCHTOWER_DEBUG=true
      - WATCHTOWER_INCLUDE_RESTARTING=true
      - WATCHTOWER_SCHEDULE=0 0 5 * * * # At 5 am everyday
    restart: always

volumes:
  mongodb_data:
    external: false
9 ansible/inventory.ini Normal file
@@ -0,0 +1,9 @@
# Spécifier les serveurs où vous souhaitez déployer votre application.
# Remplacez votre_ip_serveur par l'adresse IP de votre serveur, et votre_utilisateur_ssh par le nom d'utilisateur SSH.

# Pour les serveurs
# [app_servers]
# votre_ip_serveur ansible_user=votre_utilisateur_ssh

[local]
localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3
@@ -12,6 +12,10 @@ RUN npm install

RUN npm run build

EXPOSE 5173
ENV PORT=5173
EXPOSE ${PORT}

HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
  CMD curl -f http://localhost:${PORT} || exit 1

CMD [ "npm", "run", "preview" ]
309 client/package-lock.json generated
@@ -19,6 +19,7 @@
"@mui/material": "^6.1.0",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"axios": "^1.6.7",
|
||||
"dockerode": "^4.0.2",
|
||||
"esbuild": "^0.23.1",
|
||||
"gift-pegjs": "^1.0.2",
|
||||
"jest-environment-jsdom": "^29.7.0",
|
||||
|
|
@ -1898,6 +1899,12 @@
|
|||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@balena/dockerignore": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz",
|
||||
"integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==",
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/@bcoe/v8-coverage": {
|
||||
"version": "0.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz",
|
||||
|
|
@ -5191,6 +5198,15 @@
|
|||
"dequal": "^2.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/asn1": {
|
||||
"version": "0.2.6",
|
||||
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
|
||||
"integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safer-buffer": "~2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/async": {
|
||||
"version": "3.2.6",
|
||||
"resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
|
||||
|
|
@ -5459,6 +5475,35 @@
|
|||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
|
||||
},
|
||||
"node_modules/base64-js": {
|
||||
"version": "1.5.1",
|
||||
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
|
||||
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/bcrypt-pbkdf": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
|
||||
"integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"tweetnacl": "^0.14.3"
|
||||
}
|
||||
},
|
||||
"node_modules/binary-extensions": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
|
||||
|
|
@ -5470,6 +5515,17 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/bl": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
|
||||
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"buffer": "^5.5.0",
|
||||
"inherits": "^2.0.4",
|
||||
"readable-stream": "^3.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
|
||||
|
|
@ -5543,12 +5599,45 @@
|
|||
"node-int64": "^0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer": {
|
||||
"version": "5.7.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
|
||||
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"base64-js": "^1.3.1",
|
||||
"ieee754": "^1.1.13"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer-from": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
|
||||
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/buildcheck": {
|
||||
"version": "0.0.6",
|
||||
"resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz",
|
||||
"integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==",
|
||||
"optional": true,
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/callsites": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
|
||||
|
|
@ -5659,6 +5748,12 @@
|
|||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/chownr": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
|
||||
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/ci-info": {
|
||||
"version": "3.9.0",
|
||||
"resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz",
|
||||
|
|
@ -5787,6 +5882,20 @@
|
|||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/cpu-features": {
|
||||
"version": "0.0.10",
|
||||
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
|
||||
"integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"buildcheck": "~0.0.6",
|
||||
"nan": "^2.19.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/create-jest": {
|
||||
"version": "29.7.0",
|
||||
"resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz",
|
||||
|
|
@ -6058,6 +6167,35 @@
|
|||
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/docker-modem": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz",
|
||||
"integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"debug": "^4.1.1",
|
||||
"readable-stream": "^3.5.0",
|
||||
"split-ca": "^1.0.1",
|
||||
"ssh2": "^1.15.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/dockerode": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz",
|
||||
"integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@balena/dockerignore": "^1.0.2",
|
||||
"docker-modem": "^5.0.3",
|
||||
"tar-fs": "~2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/dom-accessibility-api": {
|
||||
"version": "0.5.16",
|
||||
"resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz",
|
||||
|
|
@ -6124,6 +6262,15 @@
|
|||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/end-of-stream": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
|
||||
"integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"once": "^1.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/engine.io-client": {
|
||||
"version": "6.5.4",
|
||||
"resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.5.4.tgz",
|
||||
|
|
@ -6798,6 +6945,12 @@
|
|||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/fs-constants": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
|
||||
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/fs-extra": {
|
||||
"version": "11.2.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz",
|
||||
|
|
@ -7079,6 +7232,26 @@
|
|||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/ieee754": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
|
||||
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
],
|
||||
"license": "BSD-3-Clause"
|
||||
},
|
||||
"node_modules/ignore": {
|
||||
"version": "5.3.2",
|
||||
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
|
||||
|
|
@ -7154,8 +7327,7 @@
|
|||
"node_modules/inherits": {
|
||||
"version": "2.0.4",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||
"dev": true
|
||||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
|
||||
},
|
||||
"node_modules/is-arrayish": {
|
||||
"version": "0.2.1",
|
||||
|
|
@ -10217,11 +10389,24 @@
|
|||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/mkdirp-classic": {
|
||||
"version": "0.5.3",
|
||||
"resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
|
||||
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
|
||||
},
|
||||
"node_modules/nan": {
|
||||
"version": "2.22.0",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
|
||||
"integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
|
||||
"license": "MIT",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/nanoid": {
|
||||
"version": "5.0.7",
|
||||
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz",
|
||||
|
|
@ -10293,7 +10478,6 @@
|
|||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"wrappy": "1"
|
||||
}
|
||||
|
|
@ -10670,6 +10854,16 @@
|
|||
"resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz",
|
||||
"integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag=="
|
||||
},
|
||||
"node_modules/pump": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz",
|
||||
"integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"end-of-stream": "^1.1.0",
|
||||
"once": "^1.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/punycode": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
|
||||
|
|
@ -10814,6 +11008,20 @@
|
|||
"react-dom": ">=16.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/readable-stream": {
|
||||
"version": "3.6.2",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
"util-deprecate": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/readdirp": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
|
||||
|
|
@ -11056,6 +11264,26 @@
|
|||
"queue-microtask": "^1.2.2"
|
||||
}
|
||||
},
|
||||
"node_modules/safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/safer-buffer": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
|
|
@ -11191,12 +11419,35 @@
|
|||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/split-ca": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz",
|
||||
"integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/sprintf-js": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
||||
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/ssh2": {
|
||||
"version": "1.16.0",
|
||||
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz",
|
||||
"integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==",
|
||||
"hasInstallScript": true,
|
||||
"dependencies": {
|
||||
"asn1": "^0.2.6",
|
||||
"bcrypt-pbkdf": "^1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10.16.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"cpu-features": "~0.0.10",
|
||||
"nan": "^2.20.0"
|
||||
}
|
||||
},
|
||||
"node_modules/stack-utils": {
|
||||
"version": "2.0.6",
|
||||
"resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz",
|
||||
|
|
@ -11216,6 +11467,15 @@
|
|||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/string-length": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz",
|
||||
|
|
@ -11328,6 +11588,34 @@
|
|||
"resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
|
||||
"integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw=="
|
||||
},
|
||||
"node_modules/tar-fs": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz",
|
||||
"integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chownr": "^1.1.1",
|
||||
"mkdirp-classic": "^0.5.2",
|
||||
"pump": "^3.0.0",
|
||||
"tar-stream": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tar-stream": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
|
||||
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"bl": "^4.0.3",
|
||||
"end-of-stream": "^1.4.1",
|
||||
"fs-constants": "^1.0.0",
|
||||
"inherits": "^2.0.3",
|
||||
"readable-stream": "^3.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/test-exclude": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
|
||||
|
|
@ -11548,6 +11836,12 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"node_modules/tweetnacl": {
|
||||
"version": "0.14.5",
|
||||
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
|
||||
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==",
|
||||
"license": "Unlicense"
|
||||
},
|
||||
"node_modules/type-check": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
|
||||
|
|
@ -11774,6 +12068,12 @@
|
|||
"requires-port": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/util-deprecate": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/uuid": {
|
||||
"version": "9.0.1",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
|
||||
|
|
@ -12646,8 +12946,7 @@
|
|||
"node_modules/wrappy": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
|
||||
"dev": true
|
||||
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
|
||||
},
|
||||
"node_modules/write-file-atomic": {
|
||||
"version": "4.0.2",
|
||||
@@ -23,6 +23,7 @@
    "@mui/material": "^6.1.0",
    "@types/uuid": "^9.0.7",
    "axios": "^1.6.7",
    "dockerode": "^4.0.2",
    "esbuild": "^0.23.1",
    "gift-pegjs": "^1.0.2",
    "jest-environment-jsdom": "^29.7.0",
@@ -44,7 +44,7 @@ describe('WebSocketService', () => {

    test('createRoom should emit create-room event', () => {
        WebsocketService.connect(ENV_VARIABLES.VITE_BACKEND_URL);
        WebsocketService.createRoom();
        WebsocketService.createRoom('test');
        expect(mockSocket.emit).toHaveBeenCalledWith('create-room');
    });

@@ -1,7 +1,7 @@
import React, { useEffect, useState } from 'react';

import { Socket } from 'socket.io-client';
import { ENV_VARIABLES } from '../../../constants';
//import { ENV_VARIABLES } from '../../../constants';

import StudentModeQuiz from '../../../components/StudentModeQuiz/StudentModeQuiz';
import TeacherModeQuiz from '../../../components/TeacherModeQuiz/TeacherModeQuiz';
@@ -29,14 +29,14 @@ const JoinRoom: React.FC = () => {
    const [isConnecting, setIsConnecting] = useState<boolean>(false);

    useEffect(() => {
        handleCreateSocket();
        //handleCreateSocket();
        return () => {
            disconnect();
        };
    }, []);

    const handleCreateSocket = () => {
        const socket = webSocketService.connect(ENV_VARIABLES.VITE_BACKEND_URL);
        const socket = webSocketService.connect(`/api/room/${roomName}/socket`);

        socket.on('join-success', () => {
            setIsWaitingForTeacher(true);
@@ -98,8 +98,9 @@ const Dashboard: React.FC = () => {
                setQuizzes(quizzes as QuizType[]);
            }
            else {
                console.log("show some quizes")
                console.log("show some quizzes")
                const folderQuizzes = await ApiService.getFolderContent(selectedFolder);
                console.log("folderQuizzes: ", folderQuizzes);
                setQuizzes(folderQuizzes as QuizType[]);

            }
@@ -147,7 +148,7 @@ const Dashboard: React.FC = () => {
                setQuizzes(quizzes as QuizType[]);
            }
            else {
                console.log("show some quizes")
                console.log("show some quizzes")
                const folderQuizzes = await ApiService.getFolderContent(selectedFolder);
                setQuizzes(folderQuizzes as QuizType[]);

@@ -292,7 +293,7 @@ const Dashboard: React.FC = () => {
            }

            setQuizzes(quizzes as QuizType[]);

            setSelectedFolder('');

        } catch (error) {
            console.error('Error deleting folder:', error);
@@ -317,9 +318,11 @@ const Dashboard: React.FC = () => {
        try {
            // folderId: string GET THIS FROM CURRENT FOLDER
            await ApiService.duplicateFolder(selectedFolder);
            // TODO set the selected folder to be the duplicated folder
            const userFolders = await ApiService.getUserFolders();
            setFolders(userFolders as FolderType[]);

            const newlyCreatedFolder = userFolders[userFolders.length - 1] as FolderType;
            setSelectedFolder(newlyCreatedFolder._id);
        } catch (error) {
            console.error('Error duplicating folder:', error);
        }
@@ -401,7 +404,6 @@ const Dashboard: React.FC = () => {
            </div>

            <div className='actions'>

                <Tooltip title="Ajouter dossier" placement="top">
                    <IconButton
                        color="primary"
@@ -425,7 +427,7 @@ const Dashboard: React.FC = () => {
                    > <ContentCopy /> </IconButton>
                </Tooltip>

                <Tooltip title="Suprimer dossier" placement="top">
                <Tooltip title="Supprimer dossier" placement="top">
                    <IconButton
                        aria-label="delete"
                        color="primary"
@@ -10,7 +10,7 @@ import webSocketService, { AnswerReceptionFromBackendType } from '../../../servi
import { QuizType } from '../../../Types/QuizType';

import './manageRoom.css';
import { ENV_VARIABLES } from '../../../constants';
//import { ENV_VARIABLES } from '../../../constants';
import { StudentType, Answer } from '../../../Types/StudentType';
import { Button } from '@mui/material';
import LoadingCircle from '../../../components/LoadingCircle/LoadingCircle';
@@ -79,13 +79,19 @@ const ManageRoom: React.FC = () => {
        }
    };

    const createWebSocketRoom = () => {
    const createWebSocketRoom = async () => {
        setConnectingError('');
        const socket = webSocketService.connect(ENV_VARIABLES.VITE_BACKEND_URL);
        const room = await ApiService.createRoom();
        const socket = webSocketService.connect(`/api/room/${room.id}/socket`);

        socket.on('connect', () => {
            webSocketService.createRoom();
            webSocketService.createRoom(room.id);
        });

        socket.on("error", (error) => {
            console.error("WebSocket server error:", error);
        });

        socket.on('connect_error', (error) => {
            setConnectingError('Erreur lors de la connexion... Veuillez réessayer');
            console.error('WebSocket connection error:', error);
@@ -142,7 +148,7 @@ const ManageRoom: React.FC = () => {
            console.log('Quiz questions not found (cannot update answers without them).');
            return;
        }


        // Update the students state using the functional form of setStudents
        setStudents((prevStudents) => {
            // print the list of current student names
@@ -150,7 +156,7 @@ const ManageRoom: React.FC = () => {
            prevStudents.forEach((student) => {
                console.log(student.name);
            });


            let foundStudent = false;
            const updatedStudents = prevStudents.map((student) => {
                console.log(`Comparing ${student.id} to ${idUser}`);
@@ -170,7 +176,7 @@ const ManageRoom: React.FC = () => {
                        updatedAnswers = [...student.answers, newAnswer];
                    }
                    return { ...student, answers: updatedAnswers };
                }
            }
            return student;
        });
        if (!foundStudent) {
@@ -145,6 +145,78 @@ class ApiService {
        return localStorage.removeItem("jwt");
    }


    //Socket Route

    /**
     * Creates a new room.
     * @returns The room object if successful
     * @returns An error string if unsuccessful
     */
    public async createRoom(): Promise<any> {
        try {
            const url: string = this.constructRequestUrl(`/room`);
            const headers = this.constructRequestHeaders();

            const response = await fetch(url, {
                method: 'POST',
                headers: headers,
            });

            if (!response.ok) {
                throw new Error(`La création de la salle a échoué. Status: ${response.status}`);
            }

            const room = await response.json();
            return room;

        } catch (error) {
            console.log("Error details: ", error);

            if (error instanceof Error) {
                return error.message || 'Erreur serveur inconnue lors de la requête.';
            }

            return `Une erreur inattendue s'est produite.`;
        }
    }


    /**
     * Deletes a room by its name.
     * @param roomName - The name of the room to delete.
     * @returns true if successful
     * @returns An error string if unsuccessful
     */
    public async deleteRoom(roomName: string): Promise<any> {
        try {
            if (!roomName) {
                throw new Error(`Le nom de la salle est requis.`);
            }

            const url = this.constructRequestUrl(`/room/${roomName}`);
            const headers = this.constructRequestHeaders();
            fetch(url, {
                method: 'DELETE',
                headers: headers,
            });

            return true;

        } catch (error) {
            console.log("Error details: ", error);

            if (error instanceof Error) {
                return error.message || 'Erreur serveur inconnue lors de la requête.';
            }

            return `Une erreur inattendue s'est produite.`;
        }
    }




    // User Routes

    /**
@@ -328,7 +400,7 @@ public async login(email: string, password: string): Promise<any> {
        const result: AxiosResponse = await axios.post(url, body, { headers: headers });

        if (result.status !== 200) {
            throw new Error(`La supression du compte a échoué. Status: ${result.status}`);
            throw new Error(`La suppression du compte a échoué. Status: ${result.status}`);
        }

        return true;
@@ -384,6 +456,7 @@ public async login(email: string, password: string): Promise<any> {
        }
    }


    /**
     * @returns folder array if successful
     * @returns A error string if unsuccessful,
@@ -410,7 +483,8 @@ public async login(email: string, password: string): Promise<any> {
        if (axios.isAxiosError(error)) {
            const err = error as AxiosError;
            const data = err.response?.data as { error: string } | undefined;
            return data?.error || 'Erreur serveur inconnue lors de la requête.';
            const url = err.config?.url || 'URL inconnue';
            return data?.error || `Erreur serveur inconnue lors de la requête (${url}).`;
        }

        return `Une erreur inattendue s'est produite.`
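Taken together with the `ManageRoom.tsx` changes above, the intended calling sequence for these new room routes looks roughly like the sketch below (import paths are illustrative; error handling is reduced to the string check that `createRoom` uses to signal failure):

```typescript
// Illustrative sketch only — mirrors the flow in ManageRoom.tsx above, not new project code.
import ApiService from '../../../services/ApiService';             // path assumed
import webSocketService from '../../../services/WebSocketService'; // path assumed

async function openRoom(): Promise<void> {
    // 1. Create the room over HTTP; on failure createRoom() returns an error string.
    const room = await ApiService.createRoom();
    if (typeof room === 'string') {
        console.error('Room creation failed:', room);
        return;
    }

    // 2. Connect the socket on the per-room path, then register the room on it.
    const socket = webSocketService.connect(`/api/room/${room.id}/socket`);
    socket.on('connect', () => webSocketService.createRoom(room.id));
}
```

When the quiz ends, `endQuiz()` now also calls `apiService.deleteRoom(roomName)`, so the MongoDB record is removed along with the room container (see the WebSocketService diff below).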
@@ -1,5 +1,6 @@
// WebSocketService.tsx
import { io, Socket } from 'socket.io-client';
import apiService from './ApiService';

// Must (manually) sync these types to server/socket/socket.js

@@ -21,10 +22,14 @@ class WebSocketService {
    private socket: Socket | null = null;

    connect(backendUrl: string): Socket {
        // console.log(backendUrl);
        this.socket = io(`${backendUrl}`, {
        this.socket = io( '/',{
            path: backendUrl,
            transports: ['websocket'],
            reconnectionAttempts: 1
            autoConnect: true,
            reconnection: true,
            reconnectionAttempts: 10,
            reconnectionDelay: 10000,
            timeout: 20000,
        });
        return this.socket;
    }
@@ -37,9 +42,9 @@ class WebSocketService {
        }
    }

    createRoom() {
    createRoom(roomName: string) {
        if (this.socket) {
            this.socket.emit('create-room');
            this.socket.emit('create-room', roomName || undefined);
        }
    }

@@ -58,6 +63,8 @@ class WebSocketService {
    endQuiz(roomName: string) {
        if (this.socket) {
            this.socket.emit('end-quiz', { roomName });
            //Delete room in mongoDb, roomContainer will be deleted in cleanup
            apiService.deleteRoom(roomName);
        }
    }

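To make the `connect()` change explicit: the argument is no longer a backend URL but the Engine.IO `path` on the current origin, which is why callers now pass `/api/room/<id>/socket`. A call such as `connect('/api/room/abc123/socket')` (the room id is a placeholder) resolves to roughly:

```typescript
import { io, Socket } from 'socket.io-client';

// Equivalent of the new connect('/api/room/abc123/socket'); 'abc123' is a placeholder id.
const socket: Socket = io('/', {
    path: '/api/room/abc123/socket', // per-room HTTP path instead of a fixed backend URL
    transports: ['websocket'],
    autoConnect: true,
    reconnection: true,
    reconnectionAttempts: 10,
    reconnectionDelay: 10000,
    timeout: 20000,
});
```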
74 create-branch-image.bat Normal file
@@ -0,0 +1,74 @@
@echo off
|
||||
setlocal EnableDelayedExpansion
|
||||
|
||||
:: Check if gh is installed
|
||||
where gh >nul 2>&1
|
||||
if %errorlevel% neq 0 (
|
||||
echo GitHub CLI not found. Installing...
|
||||
winget install --id GitHub.cli
|
||||
if %errorlevel% neq 0 (
|
||||
echo Failed to install GitHub CLI. Exiting...
|
||||
exit /b 1
|
||||
)
|
||||
echo GitHub CLI installed successfully.
|
||||
)
|
||||
|
||||
:: Check if user is authenticated
|
||||
gh auth status >nul 2>&1
|
||||
if %errorlevel% neq 0 (
|
||||
echo GitHub CLI not authenticated. Please authenticate...
|
||||
gh auth login
|
||||
if %errorlevel% neq 0 (
|
||||
echo Failed to authenticate. Exiting...
|
||||
exit /b 1
|
||||
)
|
||||
echo Authentication successful.
|
||||
)
|
||||
|
||||
:: Get the current branch name
|
||||
for /f "tokens=*" %%i in ('git rev-parse --abbrev-ref HEAD') do set BRANCH_NAME=%%i
|
||||
|
||||
:: Run the GitHub workflow with the current branch name
|
||||
echo Running GitHub workflow with branch %BRANCH_NAME%...
|
||||
gh workflow run 119194149 --ref %BRANCH_NAME%
|
||||
|
||||
:: Wait and validate workflow launch
|
||||
set /a attempts=0
|
||||
set /a max_attempts=12
|
||||
echo Waiting for workflow to start...
|
||||
|
||||
:wait_for_workflow
|
||||
timeout /t 15 >nul
|
||||
set /a attempts+=1
|
||||
|
||||
:: Get recent workflow run matching our criteria with in_progress status
|
||||
for /f "tokens=*" %%i in ('gh run list --branch %BRANCH_NAME% --status in_progress --limit 1 --json databaseId --jq ".[0].databaseId"') do set WORKFLOW_RUN_ID=%%i
|
||||
|
||||
if "%WORKFLOW_RUN_ID%"=="" (
|
||||
if !attempts! lss !max_attempts! (
|
||||
echo Attempt !attempts! of !max_attempts!: No running workflow found yet...
|
||||
goto wait_for_workflow
|
||||
) else (
|
||||
echo Timeout waiting for workflow to start running.
|
||||
exit /b 1
|
||||
)
|
||||
)
|
||||
|
||||
echo Found running workflow ID: %WORKFLOW_RUN_ID%
|
||||
|
||||
:monitor_progress
|
||||
cls
|
||||
echo Workflow Progress:
|
||||
echo ----------------
|
||||
gh run view %WORKFLOW_RUN_ID% --json jobs --jq ".jobs[] | \"Job: \" + .name + \" - Status: \" + .status + if .conclusion != null then \" (\" + .conclusion + \")\" else \"\" end"
|
||||
echo.
|
||||
|
||||
:: Check if workflow is still running
|
||||
for /f "tokens=*" %%i in ('gh run view %WORKFLOW_RUN_ID% --json status --jq ".status"') do set CURRENT_STATUS=%%i
|
||||
if "%CURRENT_STATUS%" == "completed" (
|
||||
echo Workflow completed.
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
timeout /t 5 >nul
|
||||
goto monitor_progress
|
||||
137 docker-compose.local.yaml Normal file
@@ -0,0 +1,137 @@
version: '3'
|
||||
|
||||
services:
|
||||
|
||||
frontend:
|
||||
container_name: frontend
|
||||
build:
|
||||
context: ./client
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "5173:5173"
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -f http://localhost:$${PORT} || exit 1"]
|
||||
interval: 5s
|
||||
timeout: 10s
|
||||
start_period: 5s
|
||||
retries: 6
|
||||
|
||||
backend:
|
||||
build:
|
||||
context: ./server
|
||||
dockerfile: Dockerfile
|
||||
container_name: backend
|
||||
networks:
|
||||
- quiz_network
|
||||
ports:
|
||||
- "3000:3000"
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
environment:
|
||||
PORT: 3000
|
||||
MONGO_URI: "mongodb://mongo:27017/evaluetonsavoir"
|
||||
MONGO_DATABASE: evaluetonsavoir
|
||||
EMAIL_SERVICE: gmail
|
||||
SENDER_EMAIL: infoevaluetonsavoir@gmail.com
|
||||
EMAIL_PSW: 'vvml wmfr dkzb vjzb'
|
||||
JWT_SECRET: haQdgd2jp09qb897GeBZyJetC8ECSpbFJe
|
||||
depends_on:
|
||||
mongo:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -f http://localhost:$${PORT}/health || exit 1"]
|
||||
interval: 5s
|
||||
timeout: 10s
|
||||
start_period: 5s
|
||||
retries: 6
|
||||
|
||||
quizroom: # Forces image to update
|
||||
build:
|
||||
context: ./quizRoom
|
||||
dockerfile: Dockerfile
|
||||
container_name: quizroom
|
||||
ports:
|
||||
- "4500:4500"
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
healthcheck:
|
||||
test: ["CMD", "/usr/src/app/healthcheck.sh"]
|
||||
interval: 5s
|
||||
timeout: 10s
|
||||
start_period: 5s
|
||||
retries: 6
|
||||
|
||||
nginx:
|
||||
build:
|
||||
context: ./nginx
|
||||
dockerfile: Dockerfile
|
||||
container_name: nginx
|
||||
ports:
|
||||
- "80:80"
|
||||
depends_on:
|
||||
frontend:
|
||||
condition: service_healthy
|
||||
backend:
|
||||
condition: service_healthy
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
#environment:
|
||||
# - PORT=8000
|
||||
# - FRONTEND_HOST=frontend
|
||||
# - FRONTEND_PORT=5173
|
||||
# - BACKEND_HOST=backend
|
||||
# - BACKEND_PORT=3000
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "wget --spider http://0.0.0.0:$${PORT}/health || exit 1"]
|
||||
interval: 5s
|
||||
timeout: 10s
|
||||
start_period: 5s
|
||||
retries: 6
|
||||
|
||||
mongo:
|
||||
image: mongo
|
||||
container_name: mongo
|
||||
ports:
|
||||
- "27017:27017"
|
||||
tty: true
|
||||
volumes:
|
||||
- mongodb_data:/data/db
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
healthcheck:
|
||||
test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 20s
|
||||
|
||||
watchtower:
|
||||
image: containrrr/watchtower
|
||||
container_name: watchtower
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
environment:
|
||||
- TZ=America/Montreal
|
||||
- WATCHTOWER_CLEANUP=true
|
||||
- WATCHTOWER_DEBUG=true
|
||||
- WATCHTOWER_INCLUDE_RESTARTING=true
|
||||
- WATCHTOWER_SCHEDULE=0 0 5 * * * # At 5 am everyday
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
|
||||
networks:
|
||||
quiz_network:
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
mongodb_data:
|
||||
external: false
|
||||
@@ -33,8 +33,21 @@ services:
      AUTHENTICATED_ROOMS: false
    volumes:
      - ./server/auth_config.json:/usr/src/app/serveur/config/auth_config.json
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - mongo
      - keycloak
    restart: always

  quizroom:
    build:
      context: ./quizRoom
      dockerfile: Dockerfile
    container_name: quizroom
    ports:
      - "4500:4500"
    depends_on:
      - backend
    restart: always

  # Ce conteneur sert de routeur pour assurer le bon fonctionnement de l'application
@@ -72,6 +85,23 @@ services:
      - WATCHTOWER_INCLUDE_RESTARTING=true
      - WATCHTOWER_SCHEDULE=0 0 5 * * * # At 5 am everyday
    restart: always

  keycloak:
    container_name: keycloak
    image: quay.io/keycloak/keycloak:latest
    environment:
      KEYCLOAK_ADMIN: admin
      KEYCLOAK_ADMIN_PASSWORD: admin123
      KC_HEALTH_ENABLED: 'true'
      KC_FEATURES: preview
    ports:
      - "8080:8080"
    volumes:
      - ./oauth-tester/config.json:/opt/keycloak/data/import/realm-config.json
    command:
      - start-dev
      - --import-realm
      - --hostname-strict=false

volumes:
  mongodb_data:
1 documentation/.gitignore vendored Normal file
@@ -0,0 +1 @@
site
2 documentation/deploy.py Normal file
@@ -0,0 +1,2 @@
from ghp_import import ghp_import
ghp_import('site', push=True, force=True)
12 documentation/docs/developpeur/backend/api.md Normal file
@@ -0,0 +1,12 @@
## À Propos

Ce projet utilise Node.js Express pour créer un backend simple pour l'application.

## Routes API

Vous pouvez consulter toutes les routes utilisables du backend ici :

* User : https://documenter.getpostman.com/view/32663805/2sA2rCU28v#e942a4f4-321c-465b-bf88-e6c1f1d6f6c8
* Quiz : https://documenter.getpostman.com/view/32663805/2sA2rCU28v#732d980b-02fd-4807-b5bc-72725098b9b0
* Folders : https://documenter.getpostman.com/view/32663805/2sA2rCU28v#49ecd432-ccfc-4c8a-8390-b3962f0d5fd7
* Images : https://documenter.getpostman.com/view/32663805/2sA2rCU28v#58382180-d6f0-492d-80c3-e09de1c368b8
384 documentation/docs/developpeur/backend/auth.md Normal file
@@ -0,0 +1,384 @@
# Authentification

## Introduction

Le but du module d'authentification est de pouvoir facilement faire des blocs de code permettant une authentification
personnalisée. Il est possible de le faire grâce à cette architecture. Pour la première version de cette fonctionnalité,
l'introduction d'OIDC et d'OAuth est priorisée, ainsi que la migration du module d'authentification simple.

## Déconstruction simple de la structure
La structure est la suivante :

Le AuthManager s'occupe de centraliser les requêtes d'authentification. Ce dernier initialise les autres modules et est
la source de vérité dans les aspects liés à l'authentification. Les modules sont automatiquement chargés par
l'utilisation de variables d'environnement.

Le module s'occupe de créer les routes nécessaires pour son fonctionnement et de créer les utilisateurs. Ces modules
vont appeler le AuthManager afin de confirmer leurs actions avec le login/register de celui-ci.

Dans le cas de modules plus complexes, tels que le module Passport, la chaîne peut être prolongée afin de maintenir
les actions centralisées. Chaque connecteur de PassportJs est initialisé par le module de PassportJs.

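Pour fixer les idées, voici une esquisse TypeScript minimale de cette relation ; les noms suivent le diagramme de classes de la section « Structure » ci-dessous et ne correspondent pas nécessairement au code réel du projet :

```typescript
// Esquisse d'illustration seulement : les noms proviennent du diagramme de classes
// de la section « Structure » plus bas, pas du code réel du projet.
interface IAuthModule {
    registerAuth(): void;                                  // enregistre les routes du module auprès de l'API
    authenticate(credentials: unknown): Promise<unknown>;  // connexion
    register(credentials: unknown): Promise<unknown>;      // création de compte
    showAuth(): unknown;                                   // décrit le formulaire/bouton à afficher côté frontend
}

class AuthManager {
    auths: IAuthModule[] = [];

    // Les modules sont chargés selon la configuration (variables d'environnement
    // ou auth_config.json), puis chacun enregistre ses propres routes.
    registerAuths(): void {
        this.auths.forEach((auth) => auth.registerAuth());
    }

    showAuths(): unknown[] {
        return this.auths.map((auth) => auth.showAuth());
    }
}
```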
## Besoins exprimés

Modularité et généricité :

- Le système d'authentification doit être adaptable à diverses configurations, notamment pour répondre aux exigences
  spécifiques des différentes universités ou institutions.

Utilisation de différentes méthodes d'authentification :

- L'application doit permettre de gérer plusieurs fournisseurs d'authentification (SSO, LDAP, OAuth, etc.) de manière
  centralisée et flexible.

Facilité de configuration :

- Le système doit permettre une configuration simple et flexible, adaptée à différents environnements (développement,
  production, etc.).

Gestion des permissions :

- Il doit être possible de définir et de mapper facilement les permissions et les rôles des utilisateurs pour sécuriser
  l'accès aux différentes fonctionnalités de l'application.

Maintien de la connexion :

- Le système doit garantir la persistance de la connexion pendant toute la durée de l'utilisation de l'application
  (exemple : quiz), avec la possibilité de se reconnecter sans perte de données en cas de déconnexion temporaire.

## Récits utilisateurs pris en compte

- En tant qu'utilisateur de projet FOSS, je veux que le module d'authentification soit modulaire et générique afin de
  l'adapter à mes besoins.
- En tant qu'administrateur, je veux que les droits des utilisateurs soient inférés par l'authentificateur de l'établissement.
- En tant qu'administrateur, je veux que la configuration des authentificateurs soit simple.
- En tant qu'administrateur, je veux configurer les connexions à partir de variables d'environnement ou fichier de config.
- En tant qu'utilisateur, je veux que ma connexion soit stable.
- En tant qu'utilisateur, je veux pouvoir me reconnecter à une salle s'il survient un problème de connexion.

## Diagrammes

### Structure
```plantuml
|
||||
@startuml
|
||||
|
||||
package Backend {
|
||||
class AuthManager{
|
||||
+IAuthModule[] auths
|
||||
#userInfos
|
||||
|
||||
-load()
|
||||
-registerAuths()
|
||||
+showAuths()
|
||||
|
||||
+authStatus()
|
||||
+logIn(UserInfos)
|
||||
+register(UserInfos)
|
||||
+logOut()
|
||||
}
|
||||
|
||||
interface IAuthModule{
|
||||
+registerAuth()
|
||||
+authenticate()
|
||||
+register()
|
||||
+showAuth()
|
||||
}
|
||||
|
||||
class SimpleFormAuthModule{
|
||||
|
||||
}
|
||||
|
||||
class PassportAuthModule{
|
||||
IPassportProviderDefinition[] providers
|
||||
}
|
||||
|
||||
Interface IPassportProviderDefinition{
|
||||
+name
|
||||
+type
|
||||
}
|
||||
|
||||
class OAuthPassportProvider{
|
||||
+clientId
|
||||
+clientSecret
|
||||
+configUrl
|
||||
+authorizeUrl
|
||||
+tokenUrl
|
||||
+userinfoUrl
|
||||
+logoutUrl
|
||||
+JWKSUrl
|
||||
}
|
||||
|
||||
IAuthModule <|-- SimpleFormAuthModule
|
||||
IAuthModule <|-- PassportAuthModule
|
||||
IPassportProviderDefinition <|-- OAuthPassportProvider
|
||||
|
||||
AuthManager -> IAuthModule
|
||||
PassportAuthModule -> IPassportProviderDefinition
|
||||
}
|
||||
|
||||
package Frontend{
|
||||
class AuthDrawer{
|
||||
+IAuthVisual[] getAuthsVisual()
|
||||
+drawAuths()
|
||||
}
|
||||
|
||||
Interface IAuthVisual{
|
||||
+draw()
|
||||
}
|
||||
|
||||
class FormVisual{
|
||||
+FormInput[] formInputs
|
||||
}
|
||||
|
||||
interface FormInput{
|
||||
+name
|
||||
+label
|
||||
+type
|
||||
+value
|
||||
}
|
||||
|
||||
AuthDrawer -> IAuthVisual
|
||||
IAuthVisual <|-- FormVisual
|
||||
FormVisual -> FormInput
|
||||
}
|
||||
|
||||
@enduml
|
||||
```
|
||||
|
||||
|
||||
### Explication des communications : Passport Js
|
||||
```plantuml
|
||||
@startuml
|
||||
|
||||
box "Frontend"
|
||||
participant User
|
||||
Participant App
|
||||
end box
|
||||
|
||||
box "Backend"
|
||||
participant PassportAuthModule
|
||||
participant Db
|
||||
participant AuthManager
|
||||
end box
|
||||
|
||||
box "Auth Server"
|
||||
participant AuthServer
|
||||
end box
|
||||
|
||||
User -> App : Get auth page
|
||||
App -> User : auth page
|
||||
|
||||
User -> App : click OAuth button
|
||||
App -> User : redirect to OAuth
|
||||
|
||||
User -> AuthServer: Login
|
||||
AuthServer -> User: Redirect to Auth endpoint with token
|
||||
|
||||
User -> PassportAuthModule: Authenticate with token
|
||||
|
||||
PassportAuthModule -> AuthServer: get user info
|
||||
AuthServer -> PassportAuthModule: userInfo
|
||||
|
||||
alt login
|
||||
PassportAuthModule -> Db : fetch local userInfo
|
||||
Db->PassportAuthModule: userInfo
|
||||
PassportAuthModule -> PassportAuthModule: Merge userInfo definition
|
||||
PassportAuthModule -> Db : update user profile
|
||||
Db->PassportAuthModule: userInfo
|
||||
end
|
||||
|
||||
alt register
|
||||
PassportAuthModule -> Db : fetch local userInfo
|
||||
Db->PassportAuthModule: null
|
||||
PassportAuthModule -> Db : create user profile
|
||||
Db->PassportAuthModule: userInfo
|
||||
end
|
||||
|
||||
PassportAuthModule -> AuthManager : login(userInfos)
|
||||
|
||||
AuthManager -> User: Give refresh token + Redirect to page
|
||||
User -> App: get /
|
||||
App -> User: Show Authenticated /
|
||||
@enduml
|
||||
```
|
||||
|
||||
### Explication des communications : SimpleAuth
|
||||
```plantuml
|
||||
@startuml
|
||||
|
||||
box "Frontend"
|
||||
participant User
|
||||
Participant App
|
||||
end box
|
||||
|
||||
box "Backend"
|
||||
participant SimpleAuthModule
|
||||
participant Db
|
||||
participant AuthManager
|
||||
end box
|
||||
|
||||
User -> App : Get auth page
|
||||
App -> User : auth page
|
||||
|
||||
|
||||
alt Login
|
||||
User -> App : Send Login/Pass
|
||||
|
||||
App -> SimpleAuthModule: Send login/pass
|
||||
|
||||
SimpleAuthModule -> Db: get user info
|
||||
Db->SimpleAuthModule: user info
|
||||
SimpleAuthModule -> SimpleAuthModule: Validate Hash
|
||||
end
|
||||
|
||||
alt register
|
||||
User -> App : Send Username + Password + Email
|
||||
|
||||
App -> SimpleAuthModule: Send Username + Password + Email
|
||||
|
||||
SimpleAuthModule -> Db: get user info
|
||||
Db -> SimpleAuthModule : null
|
||||
|
||||
SimpleAuthModule -> Db: put user info
|
||||
end
|
||||
|
||||
SimpleAuthModule -> AuthManager: userInfo
|
||||
AuthManager -> User: Give refresh token + Redirect to page
|
||||
User -> App: get /
|
||||
App -> User: Show Authenticated /
|
||||
@enduml
|
||||
```
|
||||
|
||||
### Comment les boutons sont affichés
|
||||
```plantuml
|
||||
@startuml
|
||||
|
||||
box "FrontEnd"
|
||||
participant User
|
||||
Participant FrontEnd
|
||||
Participant AuthDrawer
|
||||
end box
|
||||
|
||||
box "BackEnd"
|
||||
participant API
|
||||
participant AuthManager
|
||||
participant Db
|
||||
participant IAuthModule
|
||||
end box
|
||||
|
||||
API -> API : load global configurations
|
||||
|
||||
create AuthManager
|
||||
API -> AuthManager : instantiate with auth configurations
|
||||
|
||||
|
||||
create IAuthModule
|
||||
AuthManager -> IAuthModule : instantiate array
|
||||
|
||||
loop For each auth in auths
|
||||
AuthManager -> IAuthModule : register
|
||||
IAuthModule -> API : register routes
|
||||
API -> IAuthModule : route registration confirmation
|
||||
IAuthModule -> AuthManager : module registration confirmation
|
||||
end
|
||||
|
||||
User -> FrontEnd : get login page
|
||||
|
||||
alt already logged in
|
||||
FrontEnd -> User: redirected to authenticated page
|
||||
end
|
||||
|
||||
FrontEnd -> AuthDrawer : get auth visual
|
||||
AuthDrawer -> API : get auth form data
|
||||
|
||||
API -> AuthManager : get auth form data
|
||||
|
||||
|
||||
loop For each auth in auths
|
||||
AuthManager -> IAuthModule : get form data
|
||||
IAuthModule -> AuthManager : form data
|
||||
end
|
||||
|
||||
AuthManager -> API : auth form data
|
||||
API -> AuthDrawer : auth form data
|
||||
|
||||
AuthDrawer -> AuthDrawer : make auth html
|
||||
AuthDrawer -> FrontEnd : auth HTML
|
||||
FrontEnd -> User : show auth page
|
||||
|
||||
|
||||
@enduml
|
||||
```
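
À titre d'illustration, voici une esquisse minimale en TypeScript de l'agrégation des données de formulaire par le `AuthManager`. Les noms et signatures sont hypothétiques, calqués sur les diagrammes ci-dessus; ils ne correspondent pas nécessairement au code réel du backend.

```typescript
// Esquisse hypothétique : seuls les noms des diagrammes sont repris.
interface FormInput {
  name: string;
  label: string;
  type: string;
  value?: string;
}

interface AuthFormData {
  name: string;
  type: 'form' | 'oauth' | 'oidc';
  inputs: FormInput[];
}

interface IAuthModule {
  registerAuth(): void;
  showAuth(): AuthFormData;
}

class AuthManager {
  constructor(private readonly auths: IAuthModule[]) {}

  // Boucle « For each auth in auths » du diagramme : chaque module fournit
  // ses données de formulaire, renvoyées ensuite à l'API puis au AuthDrawer.
  showAuths(): AuthFormData[] {
    return this.auths.map((auth) => auth.showAuth());
  }
}
```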
|
||||
|
||||
### Comment les sessions sont conservées
|
||||
```plantuml
|
||||
@startuml
|
||||
box "Frontend"
|
||||
participant User
|
||||
Participant App
|
||||
end box
|
||||
|
||||
box "Backend"
|
||||
participant AuthManager
|
||||
participant IAuthModules
|
||||
end box
|
||||
|
||||
App -> AuthManager : send refresh token
|
||||
|
||||
AuthManager -> IAuthModules: ForEach check if logged
|
||||
IAuthModules -> AuthManager: is authenticated ?
|
||||
|
||||
alt one logged in
|
||||
AuthManager -> App : send new token
|
||||
end
|
||||
|
||||
alt all logged out
|
||||
AuthManager -> App : send error
|
||||
App -> App : destroy token
|
||||
App -> User : redirect to login page
|
||||
end
|
||||
|
||||
@enduml
|
||||
```
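
Le diagramme ci-dessus peut se résumer par l'esquisse TypeScript suivante (interfaces et noms hypothétiques) : le gestionnaire interroge chaque module et renvoie un nouveau jeton dès qu'un module reconnaît le jeton de rafraîchissement, sinon il signale l'erreur au frontend.

```typescript
// Esquisse hypothétique du flux « Comment les sessions sont conservées ».
interface IAuthModule {
  isLoggedIn(refreshToken: string): Promise<boolean>;
}

class AuthManager {
  constructor(private readonly auths: IAuthModule[]) {}

  async refresh(refreshToken: string): Promise<string> {
    for (const auth of this.auths) {
      if (await auth.isLoggedIn(refreshToken)) {
        return this.issueNewToken(); // « send new token »
      }
    }
    // « all logged out » : le frontend détruit son jeton et redirige vers la page de connexion.
    throw new Error('Session expirée');
  }

  private issueNewToken(): string {
    return 'nouveau-jeton'; // la signature réelle du jeton (JWT) est omise ici
  }
}
```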
|
||||
|
||||
## Configuration des variables d'environnement
|
||||
|
||||
Exemple de configuration du fichier `server/auth_config.json` :
|
||||
|
||||
```json
|
||||
{
|
||||
"auth": {
|
||||
"passportjs": // Module
|
||||
[
|
||||
{
|
||||
"gmatte": { // Nom du sous-module Passport
|
||||
"type": "oauth", // type
|
||||
"OAUTH_AUTHORIZATION_URL": "https://auth.gmatte.xyz/application/o/authorize/",
|
||||
"OAUTH_TOKEN_URL": "https://auth.gmatte.xyz/application/o/token/",
|
||||
"OAUTH_USERINFO_URL": "https://auth.gmatte.xyz/application/o/userinfo/",
|
||||
"OAUTH_CLIENT_ID": "--redacted--",
|
||||
"OAUTH_CLIENT_SECRET": "--Redacted--",
|
||||
"OAUTH_ADD_SCOPE": "groups", // scopes supplémentaire nécessaire pour le pivot
|
||||
"OAUTH_ROLE_TEACHER_VALUE": "groups_evaluetonsavoir-prof", // valeur de pivot afin de définir un enseignant
|
||||
"OAUTH_ROLE_STUDENT_VALUE": "groups_evaluetonsavoir" // valeur de pivot afin de définir un étudiant
|
||||
}
|
||||
},
|
||||
{
|
||||
"etsmtl":{
|
||||
"type":"oidc",
|
||||
"OIDC_CONFIG_URL":"https://login.microsoftonline.com/70aae3b7-9f3b-484d-8f95-49e8fbb783c0/v2.0/.well-known/openid-configuration",
|
||||
"OIDC_CLIENT_ID": "--redacted--",
|
||||
"OIDC_CLIENT_SECRET": "--redacted--",
|
||||
"OIDC_ADD_SCOPE": "",
|
||||
"OIDC_ROLE_TEACHER_VALUE": "groups_evaluetonsavoir-prof",
|
||||
"OIDC_ROLE_STUDENT_VALUE": "groups_evaluetonsavoir"
|
||||
}
|
||||
}
|
||||
],
|
||||
"simpleauth":{}
|
||||
}
|
||||
}
|
||||
```
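
À titre indicatif, une esquisse TypeScript de lecture de ce fichier pourrait ressembler à ceci; le chargement réel effectué par le serveur peut différer, et les commentaires `//` de l'exemple ci-dessus sont illustratifs (à retirer d'un vrai fichier JSON).

```typescript
import { readFileSync } from 'node:fs';

// Les types ne couvrent que les champs montrés dans l'exemple ci-dessus.
type ProviderOptions = { type: 'oauth' | 'oidc' } & Record<string, string>;
type PassportSubmodule = Record<string, ProviderOptions>;

interface AuthConfig {
  auth: {
    passportjs?: PassportSubmodule[];
    simpleauth?: Record<string, unknown>;
  };
}

const config: AuthConfig = JSON.parse(readFileSync('server/auth_config.json', 'utf8'));

for (const submodule of config.auth.passportjs ?? []) {
  for (const [name, options] of Object.entries(submodule)) {
    console.log(`Fournisseur « ${name} » de type ${options.type}`);
  }
}
```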
|
||||
11
documentation/docs/developpeur/backend/base-de-donnees.md
Normal file
11
documentation/docs/developpeur/backend/base-de-donnees.md
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# Type de base de données
|
||||
La base de données est une MongoDB.
|
||||
|
||||
# Collections disponibles
|
||||
* Files : Ceci est la collection qui contient les différents quiz et leurs questions.
|
||||
* Folders : Ceci est la collection qui contient les dossiers qui servent à la gestion des différents quiz
|
||||
* Images : C'est dans cette collection que sont stockées les images utilisées dans les quiz
|
||||
* Users : Cette collection est utilisée pour la gestion des utilisateurs
|
||||
|
||||
# Information sur la création
|
||||
Lors du démarrage du projet, la base de données est créée automatiquement.
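
Pour s'en convaincre, une esquisse TypeScript avec le pilote officiel MongoDB permet de lister ces collections. L'URI et le nom de base reprennent les exemples de la page de configuration (`MONGO_URI`, `MONGO_DATABASE`) et sont à adapter à votre environnement.

```typescript
import { MongoClient } from 'mongodb';

async function listCollections(): Promise<void> {
  // Valeurs d'exemple; utilisez vos propres MONGO_URI et MONGO_DATABASE.
  const client = new MongoClient('mongodb://localhost:27017');
  try {
    await client.connect();
    const collections = await client.db('evaluetonsavoir').listCollections().toArray();
    console.log(collections.map((c) => c.name)); // ex. : Files, Folders, Images, Users
  } finally {
    await client.close();
  }
}

listCollections().catch(console.error);
```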
|
||||
43
documentation/docs/developpeur/backend/katex.md
Normal file
43
documentation/docs/developpeur/backend/katex.md
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# KaTeX
|
||||
|
||||
KaTeX est le module qui s'occupe de formater les formules mathématiques selon la configuration donnée.
|
||||
|
||||
Les formules entourées de $$ s'afficheront centrées sur leur propre ligne
|
||||
|
||||
`.replace(/\$\$(.*?)\$\$/g, (_, inner) => katex.renderToString(inner, { displayMode: true }))`
|
||||
|
||||
alors que les formules entourées de $ s'afficheront sur la même ligne
|
||||
|
||||
`.replace(/\$(.*?)\$/g, (_, inner) => katex.renderToString(inner, { displayMode: false }))`
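
Les deux remplacements ci-dessus peuvent être combinés dans un petit utilitaire TypeScript (esquisse simplifiée; le vrai code se trouve dans TextType.ts). L'ordre est important : le passage `$$...$$` doit être traité avant `$...$`, sinon l'expression `/\$(.*?)\$/` capturerait les délimiteurs doubles.

```typescript
import katex from 'katex';

// Esquisse : applique d'abord le mode affiché ($$...$$), puis le mode en ligne ($...$).
export function renderMath(text: string): string {
  return text
    .replace(/\$\$(.*?)\$\$/g, (_, inner) => katex.renderToString(inner, { displayMode: true }))
    .replace(/\$(.*?)\$/g, (_, inner) => katex.renderToString(inner, { displayMode: false }));
}

// Exemple : renderMath('La formule $E = mc^2$ sera rendue en ligne.');
```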
|
||||
|
||||
La configuration du formatage peut être trouvée dans le fichier TextType.ts situé dans le dossier
|
||||
EvalueTonSavoir/client/src/components/GiftTemplate/templates
|
||||
|
||||
C'est aussi dans ce fichier que le format markdown est pris en charge.
|
||||
|
||||
## Éditeur de quiz
|
||||
Pour l'affichage dans l'éditeur de quiz, on peut voir la classe TextType appliquée à différents éléments
du dossier templates, par exemple dans la classe Numerical.ts.
|
||||
|
||||
On peut voir ici que TextType est appliqué au contenu de la question :
|
||||
```typescript
|
||||
<p style="${ParagraphStyle(state.theme)}">${TextType({text: stem })}</p>
|
||||
```
|
||||
|
||||
Selon ce qui avait été écrit dans la question, la classe s'occupera de formater les bonnes sections.
|
||||
|
||||
## Affichage de questions
|
||||
|
||||
Le module React-latex était utilisé pour le formatage des questions durant un quiz, mais cela a apporté un problème
|
||||
de disparité d'affichage entre la création et l'affichage des questions avec des formules mathématiques.
|
||||
Les classes affichant les questions durant un quiz peuvent utiliser ce format, mais avec une manipulation de plus.
|
||||
|
||||
Les variables contenant la question doivent d'abord avoir un type TextFormat pour pouvoir faire appel à la classe qui
s'occupe du format sous le module KaTeX. Puis, dans un environnement React, il faut utiliser la propriété
dangerouslySetInnerHTML pour afficher la question correctement.
|
||||
|
||||
|
||||
`<div dangerouslySetInnerHTML={{ __html: TextType({text: questionContent}) }} />`
|
||||
|
||||
Ce type de manipulation peut être utilisé dans d'autres environnements React si on veut éviter d'utiliser React-latex.
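
Par exemple, une esquisse de composant réutilisable (le chemin et la forme d'import de TextType sont hypothétiques et doivent être adaptés au projet) :

```tsx
import React from 'react';
// Chemin d'import hypothétique : à adapter selon l'emplacement réel de TextType.
import { TextType } from '../GiftTemplate/templates/TextType';

// Le contenu est formaté par TextType (KaTeX + markdown), puis injecté tel quel.
export function FormattedQuestion({ questionContent }: { questionContent: string }) {
  return <div dangerouslySetInnerHTML={{ __html: TextType({ text: questionContent }) }} />;
}
```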
|
||||
54
documentation/docs/developpeur/backend/quiz.md
Normal file
54
documentation/docs/developpeur/backend/quiz.md
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
# Exemple de Quiz
|
||||
|
||||
```gift
|
||||
//-----------------------------------------//
|
||||
// Examples from gift/format.php.
|
||||
//-----------------------------------------//
|
||||
|
||||
Who's buried in Grant's tomb?{~Grant ~Jefferson =no one}
|
||||
|
||||
Grant is {~buried =entombed ~living} in Grant's tomb.
|
||||
|
||||
Grant is buried in Grant's tomb.{FALSE}
|
||||
|
||||
Who's buried in Grant's tomb?{=no one =nobody}
|
||||
|
||||
When was Ulysses S. Grant born?{#1822:5}
|
||||
|
||||
Match the following countries with their corresponding capitals. {
|
||||
=Canada -> Ottawa
|
||||
=Italy -> Rome
|
||||
=Japan -> Tokyo
|
||||
=India -> New Delhi
|
||||
####It's good to know the capitals
|
||||
}
|
||||
|
||||
//-----------------------------------------//
|
||||
// More complicated examples.
|
||||
//-----------------------------------------//
|
||||
|
||||
::Grant's Tomb::Grant is {
|
||||
~buried#No one is buried there.
|
||||
=entombed#Right answer!
|
||||
~living#We hope not!
|
||||
} in Grant's tomb.
|
||||
|
||||
Difficult multiple choice question.{
|
||||
~wrong answer #comment on wrong answer
|
||||
~%50%half credit answer #comment on answer
|
||||
=full credit answer #well done!}
|
||||
|
||||
::Jesus' hometown (Short answer ex.):: Jesus Christ was from {
|
||||
=Nazareth#Yes! That's right!
|
||||
=%75%Nazereth#Right, but misspelled.
|
||||
=%25%Bethlehem#He was born here, but not raised here.
|
||||
}.
|
||||
|
||||
//this comment will be ignored by the filter
|
||||
::Numerical example::
|
||||
When was Ulysses S. Grant born? {#
|
||||
=1822:0 #Correct! 100% credit
|
||||
=%50%1822:2 #He was born in 1822.
|
||||
You get 50% credit for being close.
|
||||
}
|
||||
```
|
||||
|
|
@ -0,0 +1,146 @@
|
|||
{
|
||||
"openapi": "3.0.2",
|
||||
"info": {
|
||||
"title": "Room API"
|
||||
},
|
||||
"servers":[
|
||||
{
|
||||
"url": "http://localhost",
|
||||
"description": "Via Docker"
|
||||
},
|
||||
{
|
||||
"url": "http://localhost:3000",
|
||||
"description": "Via npm"
|
||||
}
|
||||
],
|
||||
"security": [
|
||||
{
|
||||
"bearerAuth": []
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"/api/room": {
|
||||
"get": {
|
||||
"summary": "Get all rooms",
|
||||
"description": "Returns a list of rooms",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Room"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"summary": "Create a new room",
|
||||
"description": "Creates a new room, returns the created room",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Created",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Room"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/room/{roomId}": {
|
||||
"get": {
|
||||
"summary": "Get a room by id",
|
||||
"description": "Returns a room by id",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "roomId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Room"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"summary": "Delete a room by id",
|
||||
"description": "Deletes a room by id",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "roomId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"securitySchemes": {
|
||||
"bearerAuth": {
|
||||
"type": "http",
|
||||
"scheme": "bearer",
|
||||
"bearerFormat": "JWT"
|
||||
}
|
||||
},
|
||||
"schemas": {
|
||||
"Room": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"default": "autoincrement"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"host": {
|
||||
"type": "string"
|
||||
},
|
||||
"nbStudents": {
|
||||
"type": "integer",
|
||||
"default": 0
|
||||
},
|
||||
"mustBeCleaned": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"name",
|
||||
"host"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
193
documentation/docs/developpeur/backend/salle-de-quiz.md
Normal file
193
documentation/docs/developpeur/backend/salle-de-quiz.md
Normal file
|
|
@ -0,0 +1,193 @@
|
|||
# Salles de Quiz
|
||||
|
||||
## Introduction
|
||||
|
||||
Les salles de quiz ont été extraites dans leur propre conteneur afin de limiter les dégâts liés soit à une
|
||||
surutilisation d'une salle soit à une attaque sur le logiciel.
|
||||
|
||||
En effet, le découplage permet à un quiz de :
|
||||
|
||||
- Survivre même si le backend est non-fonctionnel
|
||||
- Mourir sans entraîner toute l'application avec lui
|
||||
- Créer/Supprimer des salles automatiquement selon la demande
|
||||
|
||||
Pour effectuer ceci, il faut faire une petite gymnastique. Il y a une route dans l'API servant à gérer les salles.
Lorsqu'un utilisateur demande le socket d'une salle ("/api/rooms/{id}/socket"), la requête rebondit sur le proxy Nginx.
Celui-ci contacte le backend afin d'obtenir l'adresse de la machine à laquelle envoyer la requête et redirige le socket
vers cette adresse.
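
Une esquisse Express (TypeScript) du point d'appui interrogé par le proxy pourrait ressembler à ceci; la route reprend « /api/rooms/{id}/socket » mentionnée ci-dessus, mais les noms et le format de réponse sont hypothétiques.

```typescript
import express from 'express';

interface RoomRecord {
  id: string;
  host: string; // ex. « ip:port » du conteneur qui héberge la salle
}

// Registre en mémoire, uniquement pour l'illustration.
const rooms = new Map<string, RoomRecord>();

const app = express();

app.get('/api/rooms/:id/socket', (req, res) => {
  const room = rooms.get(req.params.id);
  if (!room) {
    return res.status(404).json({ error: 'Salle introuvable' });
  }
  // Le proxy Nginx utilise cette adresse pour rediriger la connexion WebSocket.
  return res.json({ target: room.host });
});

app.listen(3000);
```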
|
||||
|
||||
## Déconstruction simple de la structure
|
||||
|
||||
Un module supplémentaire a été ajouté à la structure : Rooms.
|
||||
|
||||
L'objet `room` est la définition d'une salle de façon minimaliste. Cette définition est enrichie avec l'information
récoltée du « provider ».
|
||||
Le `provider` est le système gérant les différentes salles. Dans l'implémentation effectuée, il s'agit de Docker.
|
||||
|
||||
Lorsque l'API des salles est instanciée, celle-ci est liée à un « provider », définissant comment les salles seront créées.
L'API des salles permet de les ajouter, de les supprimer et de les consulter.
|
||||
|
||||
L'API lance deux « jobs » :

- Une vérification de l'état de santé des salles. Celle-ci s'exécute toutes les 10 secondes et met à jour les salles.
- Une suppression des salles. Celle-ci s'exécute toutes les 30 secondes et supprime automatiquement les salles
  marquées pour suppression (voir l'esquisse après cette liste).
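
Esquisse TypeScript des deux « jobs »; les interfaces sont hypothétiques, inspirées des signatures du diagramme de structure plus bas.

```typescript
interface RoomProvider {
  getRoomStatus(roomId: string): Promise<'healthy' | 'unhealthy'>;
  deleteRoom(roomId: string): Promise<void>;
}

interface RoomRepository {
  getAll(): Promise<{ id: string; mustBeCleaned: boolean }[]>;
  update(room: { id: string; mustBeCleaned: boolean }, id: string): Promise<void>;
}

export function startRoomJobs(provider: RoomProvider, repo: RoomRepository): void {
  // Vérification de l'état de santé : toutes les 10 secondes.
  setInterval(async () => {
    for (const room of await repo.getAll()) {
      const status = await provider.getRoomStatus(room.id);
      if (status === 'unhealthy') {
        await repo.update({ ...room, mustBeCleaned: true }, room.id);
      }
    }
  }, 10_000);

  // Suppression des salles marquées : toutes les 30 secondes.
  setInterval(async () => {
    for (const room of await repo.getAll()) {
      if (room.mustBeCleaned) {
        await provider.deleteRoom(room.id);
      }
    }
  }, 30_000);
}
```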
|
||||
|
||||
## Besoins exprimés
|
||||
|
||||
Fiabilité :
|
||||
|
||||
- Nous voulons nous assurer qu'il soit possible d'avoir un grand nombre d'élèves présents sans qu'il y ait de problèmes de
déconnexion
|
||||
- Nous voulons que le temps de réponse soit faible
|
||||
- Nous voulons que le système soit capable de fonctionner de façon indépendante
|
||||
|
||||
## Récits utilisateurs pris en compte
|
||||
|
||||
- En tant qu'enseignant, je veux que tous mes élèves soient capables de se connecter à la salle de classe rapidement
- En tant qu'enseignant, je veux que la salle de quiz puisse survivre aux pannes liées aux autres modules de l'application
|
||||
- En tant qu'administrateur, je veux que les salles soient indépendantes et n'impactent pas les performances des autres salles
|
||||
- En tant qu'administrateur, je veux que les salles puissent être hébergées séparément du projet
|
||||
|
||||
## Diagrammes
|
||||
|
||||
### Structure
|
||||
```plantuml
|
||||
@startuml
|
||||
class Room{
|
||||
+id
|
||||
+name
|
||||
+host
|
||||
+nbStudents
|
||||
+mustBeCleaned
|
||||
}
|
||||
|
||||
class RoomRepository {
|
||||
+get(id)
|
||||
+create(room)
|
||||
+delete(id)
|
||||
+update(room,id)
|
||||
+getAll()
|
||||
}
|
||||
|
||||
class RoomController {
|
||||
+setupRoom(options)
|
||||
+deleteRoom(roomId)
|
||||
+listRooms()
|
||||
+getRoomStatus(roomId)
|
||||
+updateRoom(room,roomId)
|
||||
}
|
||||
|
||||
class RoomRouter{
|
||||
+ / : GET
|
||||
+ /:id : GET
|
||||
+ / : POST
|
||||
+ /:id : PUT
|
||||
+ /:id : DELETE
|
||||
}
|
||||
|
||||
class BaseRoomProvider {
|
||||
+createRoom(roomid,options)
|
||||
+deleteRoom(roomId)
|
||||
+getRoomInfo(roomId)
|
||||
+getRoomStatus(roomId)
|
||||
+listRooms()
|
||||
-cleanup()
|
||||
-syncInstantiatedRooms()
|
||||
#updateRoomInfos()
|
||||
}
|
||||
|
||||
class DockerRoomProvider
|
||||
circle Dockerode
|
||||
|
||||
|
||||
Room - RoomRepository
|
||||
BaseRoomProvider o-- RoomRepository
|
||||
DockerRoomProvider --|> BaseRoomProvider
|
||||
DockerRoomProvider -left- Dockerode
|
||||
Dockerode o-- QuizRoom
|
||||
RoomController o-- BaseRoomProvider
|
||||
RoomRouter o-- RoomController
|
||||
|
||||
class QuizRoom{
|
||||
+/health: GET
|
||||
+create-room()
|
||||
+join-room()
|
||||
+next-question()
|
||||
+launch-student-mode()
|
||||
+end-quiz()
|
||||
+submit-answers()
|
||||
-disconnect()
|
||||
}
|
||||
@enduml
|
||||
```
|
||||
Remarque : les signatures de fonctions semblent disparates, car il y a des fonctions de classes standards, des appels
HTTPS et des appels de sockets dans le même diagramme.
|
||||
|
||||
### Diagramme de séquence démontrant les communications
|
||||
```plantuml
|
||||
@startuml
|
||||
actor Teacher
|
||||
actor Student
|
||||
entity Nginx
|
||||
entity Frontend
|
||||
entity Api
|
||||
entity Docker
|
||||
entity Database
|
||||
|
||||
group Quiz Creation
|
||||
Teacher -> Frontend : Create a quizroom
|
||||
Frontend -> Api : Create a quizroom
|
||||
Api -> Docker : Create a quizroom
|
||||
Docker -> QuizRoom **
|
||||
QuizRoom -> Docker : creation successful
|
||||
Docker -> Api : Creation Successful
|
||||
|
||||
loop every second until healthy or 30s:
|
||||
Api -> QuizRoom : Checking Health via /health
|
||||
QuizRoom -> Api : Doesn't answer, answer healthy or unhealthy
|
||||
end
|
||||
|
||||
Api -> Database : Create Room
|
||||
Database -> Api : Room created
|
||||
Api -> Teacher : Route to room socket
|
||||
end
|
||||
|
||||
group Quiz Joining:
|
||||
Teacher -> Nginx : Join Room
|
||||
Nginx -> Api : Get room infos from id
|
||||
Api -> Nginx : Ip:port of room
|
||||
Nginx -> QuizRoom: Give teacher's connexion
|
||||
|
||||
Student -> Frontend: Join Room X
|
||||
Frontend -> Nginx : Join Room X
|
||||
Nginx -> Api : Get room infos from id
|
||||
Api -> Nginx : Ip:port of room
|
||||
Nginx -> QuizRoom: Give student's connexion
|
||||
|
||||
QuizRoom -> QuizRoom : Give Quiz ... (Multiple actions)
|
||||
|
||||
Student -> QuizRoom: Disconnect
|
||||
Teacher -> QuizRoom: Disconnect
|
||||
end
|
||||
|
||||
group QuizManagement (Every 10 seconds)
|
||||
Api -> QuizRoom : Checking number of people in the room
|
||||
QuizRoom -> Api : Number of people (0) or Unhealthy
|
||||
Api -> Database : Mark room to deletion
|
||||
end
|
||||
|
||||
group Quiz Deletion (Every 30 seconds)
|
||||
Api -> Database : Give all rooms marked for deletion
|
||||
Database -> Api : rooms
|
||||
Api -> Docker : delete rooms
|
||||
Docker -> QuizRoom : delete
|
||||
Docker -> Api : Deleted
|
||||
end
|
||||
|
||||
@enduml
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
<swagger-ui src="salle-de-quiz-swagger.json"/>
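
Un exemple d'appel (esquisse TypeScript) de la route `GET /api/room` documentée ci-dessus, avec un jeton JWT; l'URL de base reprend le serveur « Via npm » et est à adapter.

```typescript
interface Room {
  id: number;
  name: string;
  host: string;
  nbStudents?: number;
  mustBeCleaned?: boolean;
}

async function listRooms(token: string): Promise<Room[]> {
  const response = await fetch('http://localhost:3000/api/room', {
    headers: { Authorization: `Bearer ${token}` },
  });
  if (!response.ok) {
    throw new Error(`Échec de la requête : ${response.status}`);
  }
  return (await response.json()) as Room[];
}
```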
|
||||
77
documentation/docs/developpeur/deploiements/ansible.md
Normal file
77
documentation/docs/developpeur/deploiements/ansible.md
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
# Documentation de déploiement avec Ansible
|
||||
|
||||
Ce guide explique comment utiliser **Ansible** pour déployer facilement le projet **ÉvalueTonSavoir**.
|
||||
|
||||
## Prérequis
|
||||
|
||||
### Système requis
|
||||
- Un ordinateur sous **Linux** ou **Mac**.
|
||||
- Pour **Windows**, installez [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install) afin d'exécuter un environnement Ubuntu.
|
||||
|
||||
### Installation d'Ansible
|
||||
1. **Sur Ubuntu (ou WSL2)** :
|
||||
Utilisez le gestionnaire de paquets `apt` :
|
||||
```bash
|
||||
sudo apt update
|
||||
sudo apt install ansible-core
|
||||
```
|
||||
2. **Autres systèmes** :
|
||||
Consultez la [documentation officielle d'Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) pour connaître les étapes spécifiques à votre système.
|
||||
|
||||
### Installation de Docker et Docker Compose
|
||||
- Suivez la [documentation Docker officielle](https://docs.docker.com/get-docker/) pour installer Docker.
|
||||
- Docker Compose est inclus comme plugin Docker dans les versions récentes.
|
||||
|
||||
## Téléchargement des fichiers nécessaires
|
||||
|
||||
1. Clonez le dépôt Git contenant les fichiers Ansible :
|
||||
```bash
|
||||
git clone https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir
|
||||
```
|
||||
2. Naviguez vers le répertoire `ansible` :
|
||||
```bash
|
||||
cd EvalueTonSavoir/ansible
|
||||
```
|
||||
|
||||
## Déploiement avec Ansible
|
||||
|
||||
### Commande de déploiement
|
||||
Pour déployer l'application, exécutez la commande suivante dans le répertoire contenant le fichier `deploy.yml` :
|
||||
```bash
|
||||
ansible-playbook -i inventory.ini deploy.yml
|
||||
```
|
||||
|
||||
### Structure des fichiers utilisés
|
||||
- **`inventory.ini`** : Définit les cibles du déploiement. Par défaut, il est configuré pour un déploiement local.
|
||||
- **`deploy.yml`** : Playbook contenant les instructions pour installer, configurer et déployer l'application.
|
||||
|
||||
### Étapes effectuées par Ansible
|
||||
1. **Installation des dépendances** :
|
||||
- Vérifie et installe Docker si nécessaire.
|
||||
2. **Démarrage des services** :
|
||||
- Télécharge le fichier `docker-compose.yaml` depuis le dépôt Github.
|
||||
- Lance les services définis avec Docker Compose.
|
||||
3. **Vérification des conteneurs** :
|
||||
- Vérifie que les conteneurs sont en cours d'exécution et fonctionnent correctement.
|
||||
|
||||
## Vérification du déploiement
|
||||
|
||||
Une fois le playbook exécuté, Ansible :
|
||||
1. Installe Docker et ses dépendances.
|
||||
2. Télécharge et configure le projet.
|
||||
3. Lance les services avec Docker Compose.
|
||||
4. Vérifie que les services sont accessibles localement.
|
||||
|
||||
Pour tester l'application, utilisez la commande suivante :
|
||||
```bash
|
||||
curl http://localhost:8080
|
||||
```
|
||||
Un code de réponse `200 OK` indiquera que le déploiement est réussi.
|
||||
|
||||
---
|
||||
|
||||
## Résumé
|
||||
|
||||
Le déploiement avec **Ansible** simplifie la gestion des configurations et l'installation des dépendances nécessaires
|
||||
pour le projet **ÉvalueTonSavoir**. Avec cette méthode, vous pouvez déployer rapidement l'application dans un
|
||||
environnement local tout en assurant une configuration cohérente.
|
||||
63
documentation/docs/developpeur/deploiements/local.md
Normal file
63
documentation/docs/developpeur/deploiements/local.md
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
## Prérequis
|
||||
|
||||
- Assurez-vous d'avoir Node JS installé en téléchargeant la dernière version depuis [https://nodejs.org/en](https://nodejs.org/en).
|
||||
|
||||
- Ensuite, assurez-vous d'avoir accès à un serveur MongoDB de développement
|
||||
|
||||
> Pour plus d'informations sur la base de données, veuillez consulter la documentation [[ici|Base-de-données]]
|
||||
|
||||
- Cloner le projet avec la commande suivante :
|
||||
```
|
||||
git clone https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir.git
|
||||
```
|
||||
|
||||
## Étape 1 - Démarrage du backend
|
||||
|
||||
1. Naviguez vers le répertoire du projet en utilisant la commande suivante :
|
||||
```
|
||||
cd .\EvalueTonSavoir\server\
|
||||
```
|
||||
|
||||
2. Assurez-vous de créer le fichier .env et d'y ajouter les paramètres appropriés. Vous pouvez vous inspirer du fichier
|
||||
.env.example pour connaître les paramètres nécessaires.
|
||||
|
||||
[[Voir ici la documentation des configurations|Configurations]]
|
||||
|
||||
3. Installez les dépendances avec la commande suivante :
|
||||
```
|
||||
npm install
|
||||
```
|
||||
|
||||
4. Démarrez le serveur en utilisant la commande suivante :
|
||||
```
|
||||
npm run dev
|
||||
```
|
||||
|
||||
5. Ouvrez votre navigateur et accédez à l'URL indiquée dans la console (par exemple, http://localhost:4400).
|
||||
|
||||
## Étape 2 - Démarrage du frontend
|
||||
|
||||
1. Naviguez vers le répertoire du projet en utilisant la commande suivante :
|
||||
```
|
||||
cd .\EvalueTonSavoir\client\
|
||||
```
|
||||
> [!WARNING]
|
||||
> Assurez-vous que le backend est en cours d'exécution avant de démarrer le frontend. \
|
||||
> Notez également l'URL du serveur pour le fichier `.env`.
|
||||
|
||||
2. Assurez-vous de créer le fichier .env et d'y ajouter les paramètres appropriés. Vous pouvez vous inspirer du fichier
|
||||
.env.example pour connaître les paramètres nécessaires.
|
||||
|
||||
[[Voir ici la documentation des configurations|Configurations]]
|
||||
|
||||
3. Installez les dépendances avec la commande suivante :
|
||||
```
|
||||
npm install
|
||||
```
|
||||
|
||||
4. Démarrez le frontend avec la commande suivante :
|
||||
```
|
||||
npm run dev
|
||||
```
|
||||
|
||||
5. Ouvrez votre navigateur et accédez à l'URL indiquée dans la console (par exemple, http://localhost:5173/).
|
||||
61
documentation/docs/developpeur/deploiements/opentofu.md
Normal file
61
documentation/docs/developpeur/deploiements/opentofu.md
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
# Documentation de déploiement avec OpenTofu
|
||||
|
||||
Ce guide explique comment **OpenTofu** est utilisé pour déployer facilement le projet **ÉvalueTonSavoir**.
|
||||
|
||||
## Déploiement
|
||||
|
||||
### Étapes à réaliser pour faire le déploiement
|
||||
|
||||
Pour déployer à l'aide de OpenTofu, il suffit de suivre les étapes du fichier [README.md](https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir/blob/main/opentofu/README.md).
|
||||
|
||||
### Structure des fichiers utilisés pour le déploiement sur Azure
|
||||
- **`app.tf`** : Définit les configurations de la machine virtuelle qui exécute l'application.
- **`database.tf`** : Définit les configurations de la base de données.
- **`main.tf`** : Définit le fournisseur utilisé pour le déploiement, dans ce cas-ci Azure.
- **`network.tf`** : Définit les configurations réseau et les règles de sécurité réseau.
- **`resource_group.tf`** : Définit les configurations du groupe de ressources dans Azure.
- **`storage.tf`** : Définit les configurations pour stocker et pouvoir utiliser le fichier auth_config.json.
- **`terraform.tfvars`** : Définit les valeurs des variables à utiliser lors du déploiement.
- **`variables.tf`** : Définit toutes les variables qui sont utilisées lors du déploiement.
|
||||
|
||||
### Étapes effectuées par OpenTofu
|
||||
1. **Création des éléments du réseau** :
|
||||
- Création d'un réseau virtuel.
|
||||
- Création d'un sous-réseau.
|
||||
- Création d'une adresse IP publique.
|
||||
- Création d'un groupe de sécurité réseau.
|
||||
- Création d'une interface réseau.
|
||||
2. **Création de la base de données** :
|
||||
- Création du serveur de base de données.
|
||||
- Création de la base de données (collection puisqu'on utilise MongoDB)
|
||||
3. **Création de la machine virtuelle** :
|
||||
- Création de la machine virtuelle.
|
||||
- Installation de Docker
|
||||
- Récupération du fichier `docker-compose.yaml` depuis le dépôt Github.
|
||||
- Exécution de l'application avec le fichier `docker-compose.yaml`
|
||||
|
||||
## Résumé
|
||||
|
||||
Le déploiement avec **OpenTofu** simplifie la gestion des éléments nécessaires pour déployer le projet
**ÉvalueTonSavoir** dans l'infonuagique. Avec cette méthode, vous pouvez déployer rapidement et facilement
l'application dans un environnement infonuagique.
|
||||
|
||||
## Diagramme de séquence
|
||||
|
||||
```plantuml
|
||||
@startuml
|
||||
|
||||
actor Administrator
|
||||
participant "Control Machine" as control_machine
|
||||
participant "Azure" as azure
|
||||
|
||||
Administrator -> control_machine: "Se connecte à Azure"
|
||||
Administrator -> control_machine: "Lancer le déploiement avec OpenTofu"
|
||||
control_machine -> azure: "Crée les éléments réseaux"
|
||||
control_machine -> azure: "Crée la base de données"
|
||||
control_machine -> azure: "Crée la machine virtuelle qui exécute l'application"
|
||||
control_machine <- azure: "OpenTofu retourne le résultat (succès/échec)"
Administrator <- control_machine: "OpenTofu retourne le résultat (succès/échec)"
|
||||
|
||||
@enduml
|
||||
```
|
||||
230
documentation/docs/developpeur/deploiements/prod.md
Normal file
230
documentation/docs/developpeur/deploiements/prod.md
Normal file
|
|
@ -0,0 +1,230 @@
|
|||
|
||||
## Introduction
|
||||
|
||||
Nous avons choisi d'exécuter les composantes de cette application avec Docker, car cela simplifie le processus de
|
||||
gestion des processus d'application.
|
||||
|
||||
Voici un diagramme de déploiement expliquant la relation des composantes et comment les images Docker sont créées et
|
||||
déployées dans un serveur.
|
||||
|
||||
```plantuml
|
||||
@startuml
|
||||
skinparam style strictuml
|
||||
skinparam component {
|
||||
BackgroundColor<<Container>> LightBlue
|
||||
BackgroundColor<<Image>> lightgreen
|
||||
}
|
||||
node "evalsa.etsmtl.ca" {
|
||||
artifact "docker-compose.yml" as compose
|
||||
node "Docker" as docker {
|
||||
[evaluetonsavoir-routeur\n(nginx)] <<Container>> as ROC
|
||||
[evaluetonsavoir-frontend\n(vite + TypeScript React)] <<Container>> as FEC
|
||||
component "evaluetonsavoir-backend\n(Express, Javascript)" <<Container>> as BEC {
|
||||
port API_REST
|
||||
port SOCKET_SALLE
|
||||
}
|
||||
}
|
||||
database "MongoDB" as BD
|
||||
BD -- BEC
|
||||
|
||||
}
|
||||
|
||||
node "Docker hub" {
|
||||
component evaluetonsavoir-routeur <<image>> as RO {
|
||||
}
|
||||
component evaluetonsavoir-frontend <<image>> as FE {
|
||||
}
|
||||
component evaluetonsavoir-backend <<image>> as BE {
|
||||
}
|
||||
}
|
||||
|
||||
node "GitHub" {
|
||||
artifact "routeur-deploy.yml" <<action>> as RO_D
|
||||
artifact "backend-deploy.yml" <<action>> as BE_D
|
||||
artifact "frontend-deploy.yml" <<action>> as FE_D
|
||||
}
|
||||
|
||||
BE <-- BE_D : on commit
|
||||
FE <-- FE_D
|
||||
RO <-- RO_D
|
||||
|
||||
BEC <.. BE : "pull à 5h du matin"
|
||||
FEC <.. FE
|
||||
ROC <.. RO
|
||||
|
||||
node "Navigateur moderne\n(Windows/Android)" as browser {
|
||||
[React App] as RA_NAV
|
||||
}
|
||||
|
||||
RA_NAV <.. FEC : chargée à partir des pages web
|
||||
RA_NAV ..> API_REST : API REST
|
||||
RA_NAV <..> SOCKET_SALLE : WebSocket
|
||||
@enduml
|
||||
```
|
||||
|
||||
## Prérequis
|
||||
|
||||
Les STI nous ont fourni un serveur avec les spécifications suivantes :
|
||||
|
||||
- Ubuntu 22.04 LTS
|
||||
- CPU : 4 cœurs
|
||||
- RAM : 8 Go
|
||||
- HDD : 100 Go
|
||||
- Certificat SSL
|
||||
|
||||
Les STI ont déjà effectué la configuration initiale de la machine selon leurs normes de mise en place d'un serveur pour
|
||||
assurer la bonne maintenance et sécurité au sein de leur infrastructure. Cette configuration inclut un utilisateur non root.
|
||||
|
||||
Vous aurez également besoin d'un compte Docker Hub, ou vous pouvez simplement créer une PR sur le projet principal et
|
||||
elle sera déployée automatiquement.
|
||||
|
||||
## Étape 1 - Installation de Docker
|
||||
|
||||
Connectez-vous avec les informations d'identification de l'ETS :
|
||||
```
|
||||
ssh <email>@<IP>
|
||||
```
|
||||
|
||||
Tout d'abord, mettez à jour votre liste existante de packages :
|
||||
```
|
||||
sudo apt update
|
||||
```
|
||||
|
||||
Ensuite, installez quelques packages prérequis qui permettent à apt d'utiliser des packages via HTTPS :
|
||||
> [!WARNING]
|
||||
> Si vous voyez l'erreur suivante, ARRÊTEZ. Contactez les STI pour résoudre le problème. \
|
||||
> `Waiting for cache lock: Could not get lock /var/lib/dpkg/lock-frontend. It is held by process 10703 (apt)`
|
||||
```
|
||||
sudo apt install apt-transport-https ca-certificates curl software-properties-common
|
||||
```
|
||||
|
||||
Ajoutez la clé GPG du référentiel Docker officiel à votre système :
|
||||
```
|
||||
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
|
||||
```
|
||||
|
||||
Ajoutez le référentiel Docker aux sources APT :
|
||||
```
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
|
||||
```
|
||||
|
||||
Mettez à jour à nouveau votre liste existante de packages pour que l'ajout soit reconnu :
|
||||
```
|
||||
sudo apt update
|
||||
```
|
||||
|
||||
Assurez-vous que vous vous apprêtez à installer à partir du référentiel Docker plutôt que du référentiel Ubuntu par défaut :
|
||||
```
|
||||
apt-cache policy docker-ce
|
||||
```
|
||||
|
||||
Vous verrez une sortie comme celle-ci, bien que le numéro de version pour Docker puisse être différent :
|
||||
```Output
|
||||
docker-ce:
|
||||
Installed: (none)
|
||||
Candidate: 5:26.0.0-1~ubuntu.22.04~jammy
|
||||
Version table:
|
||||
5:26.0.0-1~ubuntu.22.04~jammy 500
|
||||
500 https://download.docker.com/linux/ubuntu jammy/stable amd64 Packages
|
||||
5:25.0.5-1~ubuntu.22.04~jammy 500
|
||||
500 https://download.docker.com/linux/ubuntu jammy/stable amd64 Packages
|
||||
...
|
||||
```
|
||||
|
||||
Installez Docker :
|
||||
```
|
||||
sudo apt install docker-ce
|
||||
```
|
||||
|
||||
Vérifiez que Docker fonctionne :
|
||||
```
|
||||
sudo systemctl status docker
|
||||
```
|
||||
|
||||
La sortie devrait être similaire à ce qui suit, montrant que le service est actif et en cours d'exécution :
|
||||
```Output
|
||||
● docker.service - Docker Application Container Engine
|
||||
Loaded: loaded (/lib/systemd/system/docker.service; enabled; vendor preset: enabled)
|
||||
Active: active (running) since Fri 2024-04-05 13:20:12 EDT; 1min 24s ago
|
||||
TriggeredBy: ● docker.socket
|
||||
Docs: https://docs.docker.com
|
||||
Main PID: 19389 (dockerd)
|
||||
Tasks: 10
|
||||
Memory: 28.7M
|
||||
CPU: 172ms
|
||||
CGroup: /system.slice/docker.service
|
||||
└─19389 /usr/bin/dockerd -H fd:// --containerd=/run/containerd/containerd.sock
|
||||
...
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> Si Docker ne roule pas, p.ex. vous voyez :
|
||||
> ```
|
||||
> ○ docker.service - Docker Application Container Engine
|
||||
> Loaded: loaded (/lib/systemd/system/docker.service; enabled; vendor preset: enabled)
|
||||
> Active: inactive (dead)
|
||||
> ```
|
||||
> Vous devez démarrer Docker :
|
||||
> ```
|
||||
> sudo systemctl start docker
|
||||
> ```
|
||||
|
||||
## Étape 2 - Installation de Docker Compose
|
||||
|
||||
Créez un répertoire d'installation Docker Compose :
|
||||
```
|
||||
mkdir -p ~/.docker/cli-plugins/
|
||||
```
|
||||
|
||||
Obtenez Docker Compose :
|
||||
```
|
||||
curl -SL https://github.com/docker/compose/releases/download/v2.26.1/docker-compose-linux-x86_64 -o ~/.docker/cli-plugins/docker-compose
|
||||
```
|
||||
|
||||
Ensuite, définissez les permissions correctes pour que la commande docker compose soit exécutable :
|
||||
```
|
||||
chmod +x ~/.docker/cli-plugins/docker-compose
|
||||
```
|
||||
|
||||
Pour vérifier que l'installation a réussi, vous pouvez exécuter :
|
||||
```
|
||||
docker compose version
|
||||
```
|
||||
|
||||
|
||||
## Étape 3 - Ajouter notre projet
|
||||
|
||||
|
||||
Commencez par créer un nouveau répertoire dans votre dossier personnel :
|
||||
```
|
||||
mkdir ~/EvalueTonSavoir
|
||||
```
|
||||
|
||||
Puis déplacez-vous dans le répertoire :
|
||||
```
|
||||
cd ~/EvalueTonSavoir
|
||||
```
|
||||
|
||||
Créez un fichier `docker-compose.yaml` à partir du dépôt GitHub :
|
||||
```
|
||||
curl -SL https://raw.githubusercontent.com/ets-cfuhrman-pfe/EvalueTonSavoir/main/docker-compose.yaml -o docker-compose.yaml
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> Avant de continuer, veuillez noter qu'il est crucial de mettre à jour les variables d'environnement dans le script,
|
||||
> car les valeurs actuelles sont des modèles génériques. Assurez-vous de personnaliser ces variables selon les besoins
|
||||
> spécifiques de votre environnement avant d'exécuter le script.
|
||||
|
||||
Avec le fichier docker-compose.yml en place, vous pouvez maintenant exécuter Docker Compose pour démarrer votre environnement :
|
||||
```
|
||||
sudo docker compose up -d
|
||||
```
|
||||
|
||||
Vérifiez que les services fonctionnent :
|
||||
```
|
||||
sudo docker ps -a
|
||||
```
|
||||
|
||||
## Conclusion
|
||||
|
||||
Félicitations ! Vous avez maintenant avec succès configuré et lancé EvalueTonSavoir sur votre serveur, prêt à être utilisé.
|
||||
28
documentation/docs/developpeur/documentation/a-propos.md
Normal file
28
documentation/docs/developpeur/documentation/a-propos.md
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
# À propos
|
||||
|
||||
## Lancer la documentation
|
||||
Pour lancer la documentation, il faut installer Python et entrer dans le dossier documentation.
Il faut ensuite installer les dépendances avec `pip install -r requirements.txt`.
Pour lancer le mode développement, il faut exécuter `python -m mkdocs serve`.
|
||||
Afin d'accélérer le déploiement et de ne pas être touché par des erreurs de « rate-limiting », il est préférable d'utiliser
une image Docker de PlantUML. Pour cela, il faut utiliser la commande suivante :
`docker run -d --name plantuml -p 8080:8080 plantuml/plantuml-server:tomcat`
|
||||
|
||||
## Déploiement
|
||||
Le code est automatiquement déployé par la GitHub Action `create-docs.yaml`.
Celle-ci ouvre le dépôt et effectue les mêmes étapes que « Lancer la documentation ».
Il y a une différence : elle utilise `build` au lieu de `serve`, pour ensuite publier avec l'outil [`ghp-import`](https://github.com/c-w/ghp-import).
La page est poussée sur la branche [`gh-pages`](https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir/tree/gh-pages), puis publiée en tant que [gh-page](https://pages.github.com/).
|
||||
|
||||
## Themes et Plugins
|
||||
Si vous ajoutez des plugins, veuillez mettre à jour le fichier `requirements.txt`.
|
||||
|
||||
La documentation utilise [MkDocs](https://www.mkdocs.org/) avec [le thème Material](https://squidfunk.github.io/mkdocs-material/). Plusieurs fonctionnalités, telles que les code-blocks, peuvent être activées.
Vous pouvez avoir accès à la documentation ici : [https://squidfunk.github.io/mkdocs-material/reference/code-blocks/](https://squidfunk.github.io/mkdocs-material/reference/code-blocks/)
|
||||
|
||||
## Autre méthode de lancement (virtuel)
|
||||
Si vous avez un problème avec votre environnement et que vous avez besoin d'un environnement virtuel, il suffit de faire
`python -m venv .venv` dans le dossier documentation et d'activer cet environnement avec le script d'activation (variable
selon votre invite de commande), par exemple `.venv\Scripts\activate` sous Windows.
|
||||
Vous pouvez ensuite continuer les autres étapes.
|
||||
22
documentation/docs/developpeur/frontend/index.md
Normal file
22
documentation/docs/developpeur/frontend/index.md
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
## À Propos
|
||||
|
||||
Ce projet représente une interface utilisateur React pour notre application.
|
||||
|
||||
## GIFT text format render (code source)
|
||||
|
||||
Le code original a été développé pour créer une extension VS afin de prendre en charge le format de texte GIFT.
|
||||
|
||||
Le code peut être trouvé ici: [https://codesandbox.io/s/gift-templates-iny09](https://codesandbox.io/s/gift-templates-iny09)
|
||||
|
||||
Nous avons décidé de réutiliser ce code, car il fournit un aperçu proche de ce à quoi ressemblent les quiz dans Moodle,
qui est une plateforme bien connue à l'École de Technologie Supérieure (ÉTS).
|
||||
|
||||
Pour réutiliser le code, nous avons dû installer les packages NPM suivants (voir l'esquisse d'utilisation après la liste) :
|
||||
|
||||
- [katex](https://www.npmjs.com/package/katex) : Une bibliothèque JavaScript rapide et facile à utiliser pour le rendu mathématique TeX sur le web.
|
||||
- [marked](https://www.npmjs.com/package/marked) : Un analyseur syntaxique et un compilateur de markdown construit pour la vitesse.
|
||||
- [nanoid](https://www.npmjs.com/package/nanoid) : Un générateur d'identifiants de chaîne unique, sécurisé, convivial pour les URL et minuscule (108 octets) pour JavaScript.
|
||||
- [gift-pegjs](https://www.npmjs.com/package/gift-pegjs) : Un analyseur GIFT pour JavaScript utilisant PEG.js.
|
||||
- [@types/katex](https://www.npmjs.com/package/@types/katex) : Définitions TypeScript pour katex.
|
||||
- [@types/marked](https://www.npmjs.com/package/@types/marked) : Définitions TypeScript pour marked.
|
||||
- [@types/nanoid](https://www.npmjs.com/package/@types/nanoid) : Définitions TypeScript pour nanoid.
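
Esquisse d'utilisation combinée de ces paquets; la forme exacte des objets retournés par gift-pegjs peut varier selon la version.

```typescript
import { parse } from 'gift-pegjs';
import katex from 'katex';
import { nanoid } from 'nanoid';

// Analyse d'une question au format GIFT.
const questions = parse('Quelle est la capitale du Canada?{=Ottawa ~Toronto ~Montréal}');
for (const question of questions) {
  console.log(nanoid(), question.type); // identifiant unique + type de question
}

// Rendu d'une formule TeX en HTML, comme dans l'aperçu des quiz.
console.log(katex.renderToString('E = mc^2', { displayMode: false }));
```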
|
||||
83
documentation/docs/developpeur/index.md
Normal file
83
documentation/docs/developpeur/index.md
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
# Structure haut niveau
|
||||
|
||||
## But du projet
|
||||
ÉvalueTonSavoir a été créé en raison des coûts importants des versions entreprise de logiciels similaires tels que Socrative et
Kahoot. Le but principal est d'avoir une plateforme auto-hébergée et bien intégrée aux systèmes
déjà présents des établissements scolaires.
|
||||
|
||||
## Requis
|
||||
|
||||
Le but du projet est d'avoir un outil gratuit et libre afin d'améliorer l'apprentissage avec les fonctionnalités suivantes :
|
||||
|
||||
- Permettre au personnel enseignant de créer des quiz
- Permettre aux enseignants de collecter les résultats des quiz
- Permettre aux étudiants de faire ces quiz
- Permettre aux étudiants d'avoir une rétroaction
|
||||
|
||||
Afin de limiter le niveau de difficulté d'intégration du personnel enseignant :

- Le format [`GIFT`](https://docs.moodle.org/405/en/GIFT_format), déjà présent dans Moodle, doit être utilisé
- Le personnel et les étudiants doivent être capables de s'authentifier avec le portail de l'école
|
||||
- Le démarrage du quiz doit se faire de façon rapide et efficace.
|
||||
|
||||
Afin de faciliter le déploiement de masse :
|
||||
|
||||
- Le logiciel doit être facile à déployer sur des machines locales
- Le logiciel doit être facile à déployer sur le cloud
|
||||
- Le logiciel doit s'interconnecter à l'infrastructure présente
|
||||
- Le logiciel doit être performant et fiable
|
||||
|
||||
## Architecture actuelle
|
||||
|
||||
```plantuml
|
||||
@startuml
|
||||
|
||||
package Proxy{
|
||||
component Nginx
|
||||
}
|
||||
|
||||
package App{
|
||||
component Frontend
|
||||
component Backend
|
||||
database MongoDb
|
||||
}
|
||||
|
||||
cloud Provider{
|
||||
component QuizRoom
|
||||
}
|
||||
|
||||
|
||||
Nginx --down-> Backend
|
||||
Nginx --down-> Frontend
|
||||
Nginx --down-> Provider
|
||||
|
||||
Backend --right-> MongoDb
|
||||
Backend --up-> Nginx
|
||||
|
||||
Frontend --up-> Nginx
|
||||
|
||||
@enduml
|
||||
```
|
||||
|
||||
### Détails techniques
|
||||
|
||||
Le diagramme ci-dessus est grandement simplifié, car toutes les composantes sont individuelles. Cela veut dire que chacune
des parties pourrait être déployée sur un serveur différent et tout de même fonctionner. Ceci permettrait de distribuer
la charge de travail facilement entre plusieurs serveurs.
|
||||
|
||||
Le proxy Nginx permet de camoufler la séparation du backend et frontend en réunissant les deux parties sous la même url.
|
||||
Il a aussi la tâche de diriger les appels de sockets vers leur machine interne dans le provider.
|
||||
|
||||
Le frontend dessert la partie visuelle de l'application.
|
||||
|
||||
Le backend s'occupe de tous les services suivants :
|
||||
|
||||
- La gestion des utilisateurs
|
||||
- La gestion des quizs
|
||||
- La gestion des médias
|
||||
- La gestion des salles
|
||||
|
||||
|
||||
### Liens vers détails supplémentaires
|
||||
- [Gestion de l'authentification](./backend/auth.md)
|
||||
- [Gestion de la salle de Quiz](./backend/salle-de-quiz.md)
|
||||
BIN
documentation/docs/developpeur/test/test-charge-output.png
Normal file
BIN
documentation/docs/developpeur/test/test-charge-output.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 53 KiB |
79
documentation/docs/developpeur/test/test-de-charge.md
Normal file
79
documentation/docs/developpeur/test/test-de-charge.md
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
# Tests de Charge
|
||||
|
||||
Pour tester la montée en charge et les performances du projet, un **test de charge** est inclus dans `test/stressTest`. Il est conçu spécifiquement pour **evalue-ton-savoir**, avec un focus sur les communications serveur-client et client-client.
|
||||
|
||||
---
|
||||
|
||||
## Routes utilisées sur le quizRoom
|
||||
- **`get-usage`** : Récupère les ressources des conteneurs du réseau.
|
||||
- **`message-from-teacher`** : Transfert de messages des professeurs aux étudiants.
|
||||
- **`message-from-student`** : Transfert de messages des étudiants aux professeurs.
|
||||
|
||||
---
|
||||
|
||||
## Fonctionnement
|
||||
|
||||
1. **Authentification** : Récupère un token depuis l’API backend.
|
||||
2. **Configuration** : Crée les salles de quiz et connecte un professeur à chaque salle.
|
||||
3. **Connexion des étudiants** : Connecte les étudiants aux salles selon les paramètres.
|
||||
4. **Simulation** : Messages simulés entre professeurs et étudiants.
|
||||
5. **Collecte des données** : Collecte les métriques de ressources pour analyse.
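
Les étapes 3 et 4 peuvent s'illustrer par l'esquisse TypeScript suivante d'un étudiant simulé. L'URL, le nom de salle et le format des messages sont hypothétiques; seuls les noms d'événements proviennent des routes listées plus haut et du module quizRoom.

```typescript
import { io } from 'socket.io-client';

// Un étudiant simulé rejoint une salle puis répond aux messages du professeur.
const socket = io('http://localhost'); // BASE_URL du fichier .env

socket.on('connect', () => {
  socket.emit('join-room', { roomName: '000000', username: `etudiant-${socket.id}` });
});

socket.on('message-from-teacher', (message: unknown) => {
  socket.emit('message-from-student', { answer: 'réponse simulée', received: message });
});
```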
|
||||
|
||||
---
|
||||
|
||||
## Exécution
|
||||
|
||||
L'exécution des commandes doit se faire ici: `/test/stressTest`
|
||||
|
||||
### Directe
|
||||
|
||||
```bash
|
||||
node main.js
|
||||
```
|
||||
- Node.js doit être installé.
|
||||
- Modifiez les variables dans main.js.
|
||||
|
||||
### Docker
|
||||
|
||||
```bash
|
||||
docker-compose up
|
||||
```
|
||||
- Docker doit être installé.
|
||||
- Configurez un fichier .env.
|
||||
|
||||
|
||||
## Variables d’Environnement
|
||||
|
||||
Les variables sont définies dans un fichier `.env` :
|
||||
|
||||
- **BASE_URL** : URL à tester.
|
||||
- **USER_EMAIL**, **USER_PASSWORD** : Identifiants pour créer et gérer les salles.
|
||||
- **NUMBER_ROOMS** : Nombre de salles.
|
||||
- **USERS_PER_ROOM** : Nombre d’étudiants par salle.
|
||||
|
||||
### Variables Optionnelles
|
||||
- **MAX_MESSAGES_ROUND** : Nombre maximum de messages par cycle.
|
||||
- **CONVERSATION_INTERVAL** : Délai (ms) entre les messages.
|
||||
- **MESSAGE_RESPONSE_TIMEOUT** : Délai (ms) avant de considérer un message sans réponse.
|
||||
- **BATCH_DELAY** : Délai (ms) entre les envois par lots.
|
||||
- **BATCH_SIZE** : Taille des lots de messages.
|
||||
|
||||
---
|
||||
|
||||
## Résultats Collectés
|
||||
|
||||
### Métriques
|
||||
- **Salles créées / échouées**
|
||||
- **Utilisateurs connectés / échoués**
|
||||
- **Messages tentés, envoyés, reçus**
|
||||
|
||||
### Rapports
|
||||
- **JSON** : Pour analyse automatisée.
|
||||
- **Rapport texte** : Résumé lisible.
|
||||
- **Graphiques** *(via ChartJS)* :
|
||||
- **CPU**, **mémoire**, **charge**.
|
||||
|
||||
|
||||
### Exemple graphique:
|
||||
|
||||

|
||||
23
documentation/docs/index.md
Normal file
23
documentation/docs/index.md
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# À propos
|
||||
|
||||
EvalueTonSavoir est une plateforme open source et auto-hébergée qui poursuit le développement du code provenant de [https://github.com/ETS-PFE004-Plateforme-sondage-minitest](https://github.com/ETS-PFE004-Plateforme-sondage-minitest). Cette plateforme minimaliste est conçue comme un outil d'apprentissage et d'enseignement, offrant une solution simple et efficace pour la création de quiz utilisant le format GIFT, similaire à Moodle.
|
||||
|
||||
## Fonctionnalités clés
|
||||
|
||||
* Open Source et Auto-hébergé : Possédez et contrôlez vos données en déployant la plateforme sur votre propre infrastructure.
|
||||
* Compatibilité GIFT : Créez des quiz facilement en utilisant le format GIFT, permettant une intégration transparente avec d'autres systèmes d'apprentissage.
|
||||
* Minimaliste et Efficace : Une approche bare bones pour garantir la simplicité et la facilité d'utilisation, mettant l'accent sur l'essentiel de l'apprentissage.
|
||||
|
||||
## Contribution
|
||||
|
||||
Actuellement, il n'y a pas de modèle établi pour les contributions. Si vous constatez quelque chose de manquant ou si vous pensez qu'une amélioration est possible, n'hésitez pas à ouvrir une issue et/ou une PR.
|
||||
|
||||
## Liens utiles
|
||||
|
||||
* [Dépôt d'origine Frontend](https://github.com/ETS-PFE004-Plateforme-sondage-minitest/ETS-PFE004-EvalueTonSavoir-Frontend)
|
||||
* [Dépôt d'origine Backend](https://github.com/ETS-PFE004-Plateforme-sondage-minitest/ETS-PFE004-EvalueTonSavoir-Backend)
|
||||
* [Documentation (Wiki)](https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir/wiki)
|
||||
|
||||
## License
|
||||
|
||||
EvalueTonSavoir is open-sourced and licensed under the [MIT License](/LICENSE).
|
||||
10
documentation/docs/javascripts/katex.js
Normal file
10
documentation/docs/javascripts/katex.js
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
document$.subscribe(({ body }) => {
|
||||
renderMathInElement(body, {
|
||||
delimiters: [
|
||||
{ left: "$$", right: "$$", display: true },
|
||||
{ left: "$", right: "$", display: false },
|
||||
{ left: "\\(", right: "\\)", display: false },
|
||||
{ left: "\\[", right: "\\]", display: true }
|
||||
],
|
||||
})
|
||||
})
|
||||
84
documentation/docs/utilisateur/configuration.md
Normal file
84
documentation/docs/utilisateur/configuration.md
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
> [!NOTE]
|
||||
> Chaque projet contient un fichier `.env.example` fournissant des exemples de configuration.
|
||||
> Assurez-vous de consulter ce fichier pour vous inspirer des paramètres nécessaires à votre configuration.
|
||||
|
||||
> [!NOTE]
|
||||
> Ce sont toutes les options de configuration. N'hésitez pas à ouvrir une PR si vous en voyez qui manquent.
|
||||
|
||||
## Options de Configuration Backend
|
||||
|
||||
| Variable d'Environnement | Description | Exemple | Optionnel |
|
||||
|---|---|---|---|
|
||||
| `PORT` | Le port sur lequel l'application fonctionne | 4400 | non|
|
||||
| `MONGO_URI` | La chaîne de connexion pour se connecter à la base de données mongodb | `mongodb://localhost:27017` or `mongodb://127.0.0.1:27017` (the former can cause trouble on Windows depending on hosts files) | non|
|
||||
| `MONGO_DATABASE` | Le nom souhaité pour la base de données | evaluetonsavoir | non|
|
||||
| `EMAIL_SERVICE` | Le service utilisé pour les e-mails | gmail | non|
|
||||
| `SENDER_EMAIL` | L'adresse e-mail utilisée pour l'envoi | monadresse@gmail.com | non|
|
||||
| `EMAIL_PSW` | Le mot de passe de l'adresse e-mail | 'monmotdepasse' | non|
|
||||
| `JWT_SECRET` | Le secret utilisé pour la gestion des JWT | monsecretJWT | non|
|
||||
| `FRONTEND_URL` | URL du frontend, y compris le port | http://localhost:5173 | non|
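
À titre d'exemple, une esquisse TypeScript de lecture de ces variables côté backend; les valeurs par défaut sont illustratives et ne reflètent pas nécessairement le code réel.

```typescript
function requireEnv(name: string): string {
  const value = process.env[name];
  if (!value) {
    throw new Error(`Variable d'environnement manquante : ${name}`);
  }
  return value;
}

export const config = {
  port: Number(process.env.PORT ?? 4400),
  mongoUri: requireEnv('MONGO_URI'),
  mongoDatabase: requireEnv('MONGO_DATABASE'),
  jwtSecret: requireEnv('JWT_SECRET'),
  frontendUrl: requireEnv('FRONTEND_URL'),
};
```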
|
||||
|
||||
## Options de Configuration Frontend
|
||||
|
||||
| Variable d'Environnement | Description | Exemple | Optionnel |
|
||||
|---|---|---|---|
|
||||
| `VITE_BACKEND_URL` | URL du backend, y compris le port | http://localhost:4400 | non|
|
||||
| `VITE_AZURE_BACKEND_URL` | URL du backend, y compris le port | http://localhost:4400 | non|
|
||||
|
||||
## Options de Configuration du routeur
|
||||
| Variable d'Environnement | Description | Exemple | Optionnel |
|---|---|---|---|
| `PORT` | Numéro de port sur lequel NGINX écoute | http://localhost:80 | oui |
| `FRONTEND_HOST` | URL reliée au Frontend | http://localhost | oui |
| `FRONTEND_PORT` | Port relié au Frontend | http://localhost:5173 | oui |
| `BACKEND_HOST` | URL reliée au Backend | http://localhost | oui |
| `BACKEND_PORT` | Port relié au Backend | http://localhost:3000 | oui |
|
||||
|
||||
## Options de Configuration de la salle de Quiz
|
||||
| Variable d'Environnement | Description | Exemple | Optionnel défaut |
|
||||
|---|---|---|---|
|
||||
| `PORT` | Numéro de port sur lequel la salle écoute | http://localhost:4500 | oui|
|
||||
| `ROOM_ID` | Numéro de la salle | http://localhost/rooms/000000 | oui|
|
||||
|
||||
## HealthChecks
|
||||
|
||||
### Frontend
|
||||
```yaml
healthcheck:
  test: ["CMD-SHELL", "curl -f http://localhost:$${PORT} || exit 1"]
  interval: 5s
  timeout: 10s
  start_period: 5s
  retries: 6
```

### Backend

```yaml
healthcheck:
  test: ["CMD-SHELL", "curl -f http://localhost:$${PORT}/health || exit 1"]
  interval: 5s
  timeout: 10s
  start_period: 5s
  retries: 6
```

### Salle de Quiz

```yaml
healthcheck:
  test: ["CMD", "/usr/src/app/healthcheck.sh"]
  interval: 5s
  timeout: 10s
  start_period: 5s
  retries: 6
```

### Routeur

```yaml
healthcheck:
  test: ["CMD-SHELL", "wget --spider http://0.0.0.0:$${PORT}/health || exit 1"]
  interval: 5s
  timeout: 10s
  start_period: 5s
  retries: 6
```

### MongoDb

```yaml
healthcheck:
  test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
  interval: 10s
  timeout: 5s
  retries: 3
  start_period: 20s
```
|
||||
18
documentation/docs/utilisateur/deploiment.md
Normal file
18
documentation/docs/utilisateur/deploiment.md
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# Déploiement
|
||||
|
||||
Les méthodes recommandées de déploiement sont via Ansible et OpenTofu.
Ansible est utilisé pour un déploiement sur un serveur local, OpenTofu pour le cloud.
|
||||
|
||||
## Ansible
|
||||
|
||||
Le déploiement avec Ansible est un déploiement simplifié.
Il vous suffit d'avoir un ordinateur Linux/Mac, ou pouvant exécuter [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install)
dans le cas de Windows. Il faut ensuite utiliser le gestionnaire de paquets (souvent apt) afin d'installer
le paquet `ansible-core`; d'autres méthodes sont indiquées dans la [documentation officielle d'Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html).
Une fois le tout fait, vous pouvez télécharger [les fichiers nécessaires](https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir/ansible) et lancer la commande
`ansible-playbook -i inventory.ini deploy.yml`
|
||||
|
||||
## OpenTofu
|
||||
Le déploiement avec OpenTofu est un peu plus complexe, mais il permet d'héberger la solution sur votre cloud préféré.
Il suffit [d'installer OpenTofu](https://opentofu.org/docs/intro/install/) et de télécharger [les fichiers nécessaires](https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir/opentofu).
Un README est inclus afin d'organiser votre grappe de serveurs.
|
||||
79 documentation/mkdocs.yml Normal file
@ -0,0 +1,79 @@
site_name: EvalueTonSavoir
repo_url: https://github.com/ets-cfuhrman-pfe/EvalueTonSavoir
edit_uri: edit/main/documentation/docs

theme:
  language: fr
  icon:
    repo: fontawesome/brands/github
  name: material
  palette:
    # Palette toggle for light mode
    - media: "(prefers-color-scheme: light)"
      scheme: default
      primary: red
      accent: pink
      toggle:
        icon: material/brightness-7
        name: Mode sombre

    # Palette toggle for dark mode
    - media: "(prefers-color-scheme: dark)"
      scheme: slate
      primary: red
      accent: pink
      toggle:
        icon: material/brightness-4
        name: Mode clair
  features:
    - content.code.copy
    - content.code.select
    - content.code.annotate
    - navigation.instant
    - navigation.instant.progress
    - navigation.tracking
    - content.action.edit
  highlightjs: true
  hljs_languages:
    - javascript
    - typescript
    - css
    - react
    - yaml
    - latex
    - katex
    - gift

use_directory_urls: false

plugins:
  - search
  - offline
  - plantuml:
      puml_url: !ENV [PUMLURL,'http://localhost:8080'] # dev
      puml_keyword: plantuml
      theme:
        light: material/red-light
        dark: material/red-dark
  - swagger-ui-tag:
      docExpansion: "list"
      tryItOutEnabled: false

markdown_extensions:
  - pymdownx.highlight:
      anchor_linenums: true
      line_spans: __span
      pygments_lang_class: true
  - pymdownx.inlinehilite
  - pymdownx.snippets
  - pymdownx.superfences
  - pymdownx.arithmatex:
      generic: true

extra_javascript:
  - javascripts/katex.js
  - https://unpkg.com/katex@0/dist/katex.min.js
  - https://unpkg.com/katex@0/dist/contrib/auto-render.min.js

extra_css:
  - https://unpkg.com/katex@0/dist/katex.min.css

1 documentation/python-version Normal file
@ -0,0 +1 @@
3.12
7 documentation/requirements.txt Normal file
@ -0,0 +1,7 @@
mkdocs
mkdocs[i18n]
mkdocs_puml
mkdocs-material
Pygments
ghp-import
mkdocs-swagger-ui-tag
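With these requirements the documentation site can be built locally. The commands below are a sketch, assuming Python 3.12 (per `documentation/python-version`); they rely on the `PUMLURL` fallback declared in `mkdocs.yml` and are not part of the committed files.

```sh
cd documentation
python -m pip install -r requirements.txt
# PUMLURL defaults to http://localhost:8080 when unset (see the !ENV entry in mkdocs.yml)
PUMLURL="https://www.plantuml.com/plantuml/" mkdocs serve
```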
5 nginx/.env.example Normal file
@ -0,0 +1,5 @@
PORT=80
FRONTEND_HOST=frontend
FRONTEND_PORT=5173
BACKEND_HOST=backend
BACKEND_PORT=3000
@ -1,3 +1,90 @@
|
|||
FROM nginx
|
||||
# Stage 1: Build stage
|
||||
FROM nginx:1.27-alpine AS builder
|
||||
# Install required packages
|
||||
RUN apk add --no-cache nginx-mod-http-js nginx-mod-http-keyval
|
||||
|
||||
COPY ./default.conf /etc/nginx/conf.d/default.conf
|
||||
# Stage 2: Final stage
|
||||
FROM alpine:3.19
|
||||
|
||||
# Install gettext for envsubst and other dependencies
|
||||
RUN apk add --no-cache \
|
||||
gettext \
|
||||
curl \
|
||||
nginx-mod-http-js \
|
||||
nginx-mod-http-keyval \
|
||||
pcre2 \
|
||||
ca-certificates \
|
||||
pcre \
|
||||
libgcc \
|
||||
libstdc++ \
|
||||
zlib \
|
||||
libxml2 \
|
||||
libedit \
|
||||
geoip \
|
||||
libxslt
|
||||
|
||||
# Create base nginx directory
|
||||
RUN mkdir -p /etc/nginx
|
||||
|
||||
# Copy Nginx and NJS modules from builder
|
||||
COPY --from=builder /usr/sbin/nginx /usr/sbin/
|
||||
COPY --from=builder /usr/lib/nginx/modules/ /usr/lib/nginx/modules/
|
||||
RUN rm -rf /etc/nginx/*
|
||||
COPY --from=builder /etc/nginx/ /etc/nginx/
|
||||
COPY --from=builder /usr/lib/nginx/ /usr/lib/nginx/
|
||||
|
||||
# Setup directories and permissions
|
||||
RUN mkdir -p /var/cache/nginx \
|
||||
&& mkdir -p /var/log/nginx \
|
||||
&& mkdir -p /etc/nginx/conf.d \
|
||||
&& mkdir -p /etc/nginx/njs \
|
||||
&& mkdir -p /etc/nginx/templates \
|
||||
&& chown -R nginx:nginx /var/cache/nginx \
|
||||
&& chown -R nginx:nginx /var/log/nginx \
|
||||
&& chown -R nginx:nginx /etc/nginx \
|
||||
&& touch /var/run/nginx.pid \
|
||||
&& chown nginx:nginx /var/run/nginx.pid \
|
||||
&& chmod 777 /var/log/nginx
|
||||
|
||||
# Copy necessary libraries from builder
|
||||
COPY --from=builder /usr/lib/libxml2.so* /usr/lib/
|
||||
COPY --from=builder /usr/lib/libexslt.so* /usr/lib/
|
||||
COPY --from=builder /usr/lib/libgd.so* /usr/lib/
|
||||
COPY --from=builder /usr/lib/libxslt.so* /usr/lib/
|
||||
|
||||
# Modify nginx.conf to load modules
|
||||
RUN echo 'load_module modules/ngx_http_js_module.so;' > /tmp/nginx.conf && \
|
||||
cat /etc/nginx/nginx.conf >> /tmp/nginx.conf && \
|
||||
mv /tmp/nginx.conf /etc/nginx/nginx.conf
|
||||
|
||||
# Copy configurations
|
||||
COPY templates/default.conf /etc/nginx/templates/
|
||||
COPY njs/main.js /etc/nginx/njs/
|
||||
COPY entrypoint.sh /entrypoint.sh
|
||||
# Strip CR line endings (dos2unix is not installed in this Alpine stage)
RUN sed -i 's/\r$//' /entrypoint.sh
|
||||
|
||||
ENV PORT=80 \
|
||||
FRONTEND_HOST=frontend \
|
||||
FRONTEND_PORT=5173 \
|
||||
BACKEND_HOST=backend \
|
||||
BACKEND_PORT=3000
|
||||
|
||||
# Set final permissions
|
||||
RUN chmod +x /entrypoint.sh && \
|
||||
chown -R nginx:nginx /etc/nginx && \
|
||||
chown -R nginx:nginx /var/log/nginx && \
|
||||
chown -R nginx:nginx /var/cache/nginx && \
|
||||
chmod 755 /etc/nginx && \
|
||||
chmod 777 /etc/nginx/conf.d && \
|
||||
chmod 644 /etc/nginx/templates/default.conf && \
|
||||
chmod 644 /etc/nginx/conf.d/default.conf
|
||||
|
||||
# Switch to nginx user
|
||||
USER nginx
|
||||
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD wget -q --spider http://0.0.0.0:${PORT}/health || exit 1
|
||||
|
||||
# Start Nginx using entrypoint script
|
||||
# CMD [ "/bin/sh","-c","sleep 3600" ] # For debugging
|
||||
ENTRYPOINT [ "/entrypoint.sh" ]
|
||||
@ -1,31 +0,0 @@
upstream frontend {
    server frontend:5173;
}

upstream backend {
    server backend:3000;
}

server {
    listen 80;

    location /api {
        rewrite /backend/(.*) /$1 break;
        proxy_pass http://backend;
    }

    location /socket.io {
        rewrite /backend/(.*) /$1 break;
        proxy_pass http://backend;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "Upgrade";
        proxy_set_header Host $host;
        proxy_hide_header 'Access-Control-Allow-Origin';
    }

    location / {
        proxy_pass http://frontend;
    }

}
15 nginx/entrypoint.sh Normal file
@ -0,0 +1,15 @@
#!/bin/sh
# entrypoint.sh

# We are already running as nginx user
envsubst '${PORT} ${FRONTEND_HOST} ${FRONTEND_PORT} ${BACKEND_HOST} ${BACKEND_PORT}' \
    < /etc/nginx/templates/default.conf \
    > /etc/nginx/conf.d/default.conf

# Adds logs for docker
ln -sf /dev/stdout /var/log/nginx/access.log
ln -sf /dev/stderr /var/log/nginx/error.log
ln -sf /dev/stderr /var/log/nginx/debug.log

# Start nginx
exec nginx -g "daemon off;"
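The entrypoint substitutes the variables from `nginx/.env.example` into the template before starting nginx. As a quick sanity check, the router image can be run on its own; this is only a sketch (the image tag is the one referenced in opentofu/docker-compose.yaml, and the frontend/backend host names only resolve if those containers are attached to the same Docker network):

```sh
docker network create evaluetonsavoir_quiz_network 2>/dev/null || true
docker run --rm -p 80:80 \
  --network evaluetonsavoir_quiz_network \
  -e PORT=80 -e FRONTEND_HOST=frontend -e FRONTEND_PORT=5173 \
  -e BACKEND_HOST=backend -e BACKEND_PORT=3000 \
  ghcr.io/ets-cfuhrman-pfe/evaluetonsavoir-router:latest
# The router answers "healthy" on its own /health endpoint
curl http://localhost/health
```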
97
nginx/njs/main.js
Normal file
97
nginx/njs/main.js
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
function get_cache_dict(r) {
|
||||
return '';
|
||||
}
|
||||
|
||||
function getCachedData(r, key) {
|
||||
try {
|
||||
const cached = ngx.shared.cache.get(key);
|
||||
if (cached) {
|
||||
const data = JSON.parse(cached);
|
||||
const now = Date.now();
|
||||
// 2 minutes cache - let game rooms rotate
|
||||
if (now - data.timestamp < 120000) {
|
||||
r.error(`Debug: Cache hit for ${key}, age: ${(now - data.timestamp)/1000}s`);
|
||||
return data.value;
|
||||
}
|
||||
r.error(`Debug: Cache expired for ${key}, age: ${(now - data.timestamp)/1000}s`);
|
||||
}
|
||||
return null;
|
||||
} catch (error) {
|
||||
r.error(`Cache read error: ${error}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function setCachedData(r, key, value) {
|
||||
try {
|
||||
const data = {
|
||||
timestamp: Date.now(),
|
||||
value: value
|
||||
};
|
||||
ngx.shared.cache.set(key, JSON.stringify(data));
|
||||
r.error(`Debug: Cached ${key}`);
|
||||
} catch (error) {
|
||||
r.error(`Cache write error: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchRoomInfo(r) {
|
||||
const cacheKey = `room:${r.variables.room_id}`;
|
||||
|
||||
try {
|
||||
const cachedRoom = getCachedData(r, cacheKey);
|
||||
if (cachedRoom) {
|
||||
r.error(`Debug: Room info from cache: ${JSON.stringify(cachedRoom)}`);
|
||||
return cachedRoom;
|
||||
}
|
||||
|
||||
let res = await r.subrequest('/api/room/' + r.variables.room_id, {
|
||||
method: 'GET'
|
||||
});
|
||||
|
||||
if (res.status !== 200) {
|
||||
r.error(`Failed to fetch room info: ${res.status}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
let room = JSON.parse(res.responseText);
|
||||
setCachedData(r, cacheKey, room);
|
||||
r.error(`Debug: Room info fetched and cached: ${JSON.stringify(room)}`);
|
||||
return room;
|
||||
|
||||
} catch (error) {
|
||||
r.error(`Error fetching/caching room info: ${error}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
get_cache_dict,
|
||||
routeWebSocket: async function(r) {
|
||||
try {
|
||||
const roomInfo = await fetchRoomInfo(r);
|
||||
|
||||
if (!roomInfo || !roomInfo.host) {
|
||||
r.error(`Debug: Invalid room info: ${JSON.stringify(roomInfo)}`);
|
||||
r.return(404, 'Room not found or invalid');
|
||||
return;
|
||||
}
|
||||
|
||||
let proxyUrl = roomInfo.host;
|
||||
if (!proxyUrl.startsWith('http://') && !proxyUrl.startsWith('https://')) {
|
||||
proxyUrl = 'http://' + proxyUrl;
|
||||
}
|
||||
|
||||
r.error(`Debug: Original URL: ${r.uri}`);
|
||||
r.error(`Debug: Setting proxy target to: ${proxyUrl}`);
|
||||
r.error(`Debug: Headers: ${JSON.stringify(r.headersIn)}`);
|
||||
|
||||
r.variables.proxy_target = proxyUrl;
|
||||
r.internalRedirect('@websocket_proxy');
|
||||
|
||||
} catch (error) {
|
||||
r.error(`WebSocket routing error: ${error}`);
|
||||
r.return(500, 'Internal routing error');
|
||||
}
|
||||
}
|
||||
};
|
||||
81 nginx/templates/default.conf Normal file
@ -0,0 +1,81 @@
js_shared_dict_zone zone=cache:10m;
js_import njs/main.js;
js_set $cache_dict main.get_cache_dict;

map $http_upgrade $connection_upgrade {
    default upgrade;
    '' close;
}

upstream frontend {
    server ${FRONTEND_HOST}:${FRONTEND_PORT};
}

upstream backend {
    server ${BACKEND_HOST}:${BACKEND_PORT};
}

server {
    listen ${PORT};

    set $proxy_target "";

    location /health {
        access_log off;
        add_header Content-Type text/plain;
        return 200 'healthy';
    }

    location /backend-health {
        proxy_pass http://backend/health;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        access_log off;
    }

    location /frontend-health {
        proxy_pass http://frontend;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        access_log off;
    }

    location /api {
        proxy_pass http://backend;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    # Game WebSocket routing
    location ~/api/room/([^/]+)/socket {
        set $room_id $1;
        js_content main.routeWebSocket;
    }

    # WebSocket proxy location
    location @websocket_proxy {
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection $connection_upgrade;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

        # Timeouts
        proxy_connect_timeout 7m;
        proxy_send_timeout 7m;
        proxy_read_timeout 7m;
        proxy_buffering off;

        proxy_pass $proxy_target;
    }

    location / {
        proxy_pass http://frontend;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}
44 opentofu/README.md Normal file
@ -0,0 +1,44 @@
# Deployment with OpenTofu

## Microsoft Azure

### Install OpenTofu

https://opentofu.org/docs/intro/install/

### Install the Azure CLI

https://learn.microsoft.com/en-us/cli/azure/install-azure-cli#install

### Sign in to Azure and get the Azure subscription ID

To sign in to Azure, run the following command:

`az login`

This command lets you select an Azure subscription. Copy the subscription ID; you will need it in the next step.

### Edit the configuration

Create a **terraform.tfvars** file based on **terraform.tfvars.example** in the **azure** directory.
Every variable used during deployment can be changed in this file.
All variables, their descriptions and their default values are listed in **variables.tf**.

Create an **auth_config.json** file based on **auth_config.json.example** in the **opentofu** directory.

The site URL is built as follows: http://<container_group_app_dns>.<location>.cloudapp.azure.com.
By default, the URL is http://evaluetonsavoir.canadacentral.cloudapp.azure.com/

### Launch the deployment

To launch the deployment, run the following commands (a consolidated sketch of the whole sequence follows this file):

`cd azure`
`az login`
`tofu init`
`tofu apply`

OpenTofu then lists all the actions it will perform with the configured values.
Enter `yes` to apply these actions and launch the deployment.
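The steps above, gathered into one shell session. This is a sketch rather than a committed script; it assumes you run it from the `opentofu/` directory of a checkout and that the Azure CLI and OpenTofu are already installed.

```sh
# Prepare the configuration files from the provided examples
cp auth_config.json.example auth_config.json                # fill in your identity providers
cp azure/terraform.tfvars.example azure/terraform.tfvars    # set subscription_id, secrets, VM credentials

# Deploy
az login
cd azure
tofu init
tofu apply   # review the plan, then type "yes"
```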
35
opentofu/auth_config.json.example
Normal file
35
opentofu/auth_config.json.example
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
auth: {
|
||||
passportjs: [
|
||||
{
|
||||
provider1: {
|
||||
type: "oauth",
|
||||
OAUTH_AUTHORIZATION_URL: "https://www.testurl.com/oauth2/authorize",
|
||||
OAUTH_TOKEN_URL: "https://www.testurl.com/oauth2/token",
|
||||
OAUTH_USERINFO_URL: "https://www.testurl.com/oauth2/userinfo/",
|
||||
OAUTH_CLIENT_ID: "your_oauth_client_id",
|
||||
OAUTH_CLIENT_SECRET: "your_oauth_client_secret",
|
||||
OAUTH_ADD_SCOPE: "scopes",
|
||||
OAUTH_ROLE_TEACHER_VALUE: "teacher-claim-value",
|
||||
OAUTH_ROLE_STUDENT_VALUE: "student-claim-value",
|
||||
},
|
||||
},
|
||||
{
|
||||
provider2: {
|
||||
type: "oidc",
|
||||
OIDC_CLIENT_ID: "your_oidc_client_id",
|
||||
OIDC_CLIENT_SECRET: "your_oidc_client_secret",
|
||||
OIDC_CONFIG_URL: "https://your-issuer.com",
|
||||
OIDC_ADD_SCOPE: "groups",
|
||||
OIDC_ROLE_TEACHER_VALUE: "teacher-claim-value",
|
||||
OIDC_ROLE_STUDENT_VALUE: "student-claim-value",
|
||||
},
|
||||
},
|
||||
],
|
||||
"simpleauth": {
|
||||
enabled: true,
|
||||
name: "provider3",
|
||||
SESSION_SECRET: "your_session_secret",
|
||||
},
|
||||
},
|
||||
}
|
||||
67
opentofu/azure/app.tf
Normal file
67
opentofu/azure/app.tf
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
# Create Virtual Machine
|
||||
resource "azurerm_linux_virtual_machine" "vm" {
|
||||
name = var.vm_name
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
size = var.vm_size
|
||||
admin_username = var.vm_user
|
||||
admin_password = var.vm_password
|
||||
disable_password_authentication = false
|
||||
|
||||
network_interface_ids = [azurerm_network_interface.nic.id]
|
||||
|
||||
os_disk {
|
||||
name = var.vm_os_disk_name
|
||||
caching = "ReadWrite"
|
||||
storage_account_type = var.vm_os_disk_type
|
||||
}
|
||||
|
||||
source_image_reference {
|
||||
publisher = var.vm_image_publisher
|
||||
offer = var.vm_image_offer
|
||||
sku = var.vm_image_plan
|
||||
version = var.vm_image_version
|
||||
}
|
||||
|
||||
custom_data = base64encode(<<-EOT
|
||||
#!/bin/bash
|
||||
sudo apt-get update -y
|
||||
sudo apt-get install -y docker.io
|
||||
sudo apt-get install -y docker-compose
|
||||
sudo systemctl start docker
|
||||
sudo systemctl enable docker
|
||||
|
||||
sudo usermod -aG docker ${var.vm_user}
|
||||
sudo newgrp docker
|
||||
|
||||
su - ${var.vm_user} -c '
|
||||
|
||||
curl -o auth_config.json \
|
||||
"https://${azurerm_storage_account.storage_account.name}.file.core.windows.net/${azurerm_storage_share.backend_storage_share.name}/auth_config.json${data.azurerm_storage_account_sas.storage_access.sas}"
|
||||
|
||||
curl -L -o docker-compose.yaml ${var.docker_compose_url}
|
||||
|
||||
export VITE_BACKEND_URL=http://${var.dns}.${lower(replace(azurerm_resource_group.resource_group.location, " ", ""))}.cloudapp.azure.com
|
||||
export PORT=${var.backend_port}
|
||||
export MONGO_URI="${azurerm_cosmosdb_account.cosmosdb_account.primary_mongodb_connection_string}"
|
||||
export MONGO_DATABASE=${azurerm_cosmosdb_mongo_collection.cosmosdb_mongo_collection.database_name}
|
||||
export EMAIL_SERVICE=${var.backend_email_service}
|
||||
export SENDER_EMAIL=${var.backend_email_sender}
|
||||
export EMAIL_PSW="${var.backend_email_password}"
|
||||
export JWT_SECRET=${var.backend_jwt_secret}
|
||||
export SESSION_Secret=${var.backend_session_secret}
|
||||
export SITE_URL=http://${var.dns}.${lower(replace(azurerm_resource_group.resource_group.location, " ", ""))}.cloudapp.azure.com
|
||||
export FRONTEND_PORT=${var.frontend_port}
|
||||
export USE_PORTS=${var.backend_use_port}
|
||||
export AUTHENTICATED_ROOMS=${var.backend_use_auth_student}
|
||||
export QUIZROOM_IMAGE=${var.quizroom_image}
|
||||
|
||||
docker-compose up -d
|
||||
'
|
||||
EOT
|
||||
)
|
||||
|
||||
depends_on = [
|
||||
azurerm_cosmosdb_mongo_collection.cosmosdb_mongo_collection,
|
||||
data.azurerm_storage_account_sas.storage_access]
|
||||
}
|
||||
43
opentofu/azure/database.tf
Normal file
43
opentofu/azure/database.tf
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
resource "azurerm_cosmosdb_account" "cosmosdb_account" {
|
||||
name = var.cosmosdb_account_name
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
offer_type = "Standard"
|
||||
kind = "MongoDB"
|
||||
mongo_server_version = "7.0"
|
||||
|
||||
is_virtual_network_filter_enabled = true
|
||||
|
||||
virtual_network_rule {
|
||||
id = azurerm_subnet.subnet.id
|
||||
}
|
||||
|
||||
capabilities {
|
||||
name = "EnableMongo"
|
||||
}
|
||||
|
||||
consistency_policy {
|
||||
consistency_level = "Session"
|
||||
}
|
||||
|
||||
geo_location {
|
||||
failover_priority = 0
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
}
|
||||
|
||||
depends_on = [azurerm_resource_group.resource_group]
|
||||
}
|
||||
|
||||
resource "azurerm_cosmosdb_mongo_collection" "cosmosdb_mongo_collection" {
|
||||
name = var.mongo_database_name
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
account_name = azurerm_cosmosdb_account.cosmosdb_account.name
|
||||
database_name = var.mongo_database_name
|
||||
|
||||
index {
|
||||
keys = ["_id"]
|
||||
unique = true
|
||||
}
|
||||
|
||||
depends_on = [azurerm_cosmosdb_account.cosmosdb_account]
|
||||
}
|
||||
14
opentofu/azure/main.tf
Normal file
14
opentofu/azure/main.tf
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
terraform {
|
||||
required_providers {
|
||||
azurerm = {
|
||||
source = "hashicorp/azurerm"
|
||||
version = "~> 4.0"
|
||||
}
|
||||
}
|
||||
required_version = ">= 1.0"
|
||||
}
|
||||
|
||||
provider "azurerm" {
|
||||
features {}
|
||||
subscription_id = var.subscription_id
|
||||
}
|
||||
87
opentofu/azure/network.tf
Normal file
87
opentofu/azure/network.tf
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
# Create Virtual Network
|
||||
resource "azurerm_virtual_network" "vnet" {
|
||||
name = var.vnet_name
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
address_space = ["10.0.0.0/16"]
|
||||
}
|
||||
|
||||
# Create Subnet
|
||||
resource "azurerm_subnet" "subnet" {
|
||||
name = var.subnet_name
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
virtual_network_name = azurerm_virtual_network.vnet.name
|
||||
address_prefixes = ["10.0.1.0/24"]
|
||||
|
||||
service_endpoints = ["Microsoft.AzureCosmosDB"]
|
||||
}
|
||||
|
||||
# Create Public IP Address
|
||||
resource "azurerm_public_ip" "public_ip" {
|
||||
name = var.public_ip_name
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
allocation_method = "Static"
|
||||
domain_name_label = var.dns
|
||||
}
|
||||
|
||||
resource "azurerm_network_security_group" "nsg" {
|
||||
name = var.nsg_name
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
|
||||
security_rule {
|
||||
name = "SSH"
|
||||
priority = 1000
|
||||
direction = "Inbound"
|
||||
access = "Allow"
|
||||
protocol = "Tcp"
|
||||
source_port_range = "*"
|
||||
destination_port_range = "22"
|
||||
source_address_prefix = var.nsg_ssh_ip_range
|
||||
destination_address_prefix = "*"
|
||||
}
|
||||
|
||||
security_rule {
|
||||
name = "HTTP"
|
||||
priority = 1001
|
||||
direction = "Inbound"
|
||||
access = "Allow"
|
||||
protocol = "Tcp"
|
||||
source_port_range = "*"
|
||||
destination_port_range = "80"
|
||||
source_address_prefix = var.nsg_http_ip_range
|
||||
destination_address_prefix = "*"
|
||||
}
|
||||
|
||||
security_rule {
|
||||
name = "HTTPS"
|
||||
priority = 1002
|
||||
direction = "Inbound"
|
||||
access = "Allow"
|
||||
protocol = "Tcp"
|
||||
source_port_range = "*"
|
||||
destination_port_range = "443"
|
||||
source_address_prefix = var.nsg_https_ip_range
|
||||
destination_address_prefix = "*"
|
||||
}
|
||||
}
|
||||
|
||||
# Create Network Interface
|
||||
resource "azurerm_network_interface" "nic" {
|
||||
name = var.network_interface_name
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
|
||||
ip_configuration {
|
||||
name = "internal"
|
||||
subnet_id = azurerm_subnet.subnet.id
|
||||
private_ip_address_allocation = "Dynamic"
|
||||
public_ip_address_id = azurerm_public_ip.public_ip.id
|
||||
}
|
||||
}
|
||||
|
||||
resource "azurerm_network_interface_security_group_association" "example" {
|
||||
network_interface_id = azurerm_network_interface.nic.id
|
||||
network_security_group_id = azurerm_network_security_group.nsg.id
|
||||
}
|
||||
5
opentofu/azure/resource_group.tf
Normal file
5
opentofu/azure/resource_group.tf
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Create Resource Group
|
||||
resource "azurerm_resource_group" "resource_group" {
|
||||
name = var.resource_group_name
|
||||
location = var.location
|
||||
}
|
||||
74
opentofu/azure/storage.tf
Normal file
74
opentofu/azure/storage.tf
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
resource "azurerm_storage_account" "storage_account" {
|
||||
name = var.config_volume_storage_account_name
|
||||
resource_group_name = azurerm_resource_group.resource_group.name
|
||||
location = azurerm_resource_group.resource_group.location
|
||||
account_tier = "Standard"
|
||||
account_replication_type = "LRS"
|
||||
|
||||
depends_on = [azurerm_resource_group.resource_group]
|
||||
}
|
||||
|
||||
resource "azurerm_storage_share" "backend_storage_share" {
|
||||
name = var.backend_storage_share_name
|
||||
storage_account_name = azurerm_storage_account.storage_account.name
|
||||
quota = 1
|
||||
|
||||
depends_on = [azurerm_storage_account.storage_account]
|
||||
}
|
||||
|
||||
resource "null_resource" "upload_file" {
|
||||
provisioner "local-exec" {
|
||||
command = <<EOT
|
||||
az storage file upload \
|
||||
--account-name ${azurerm_storage_account.storage_account.name} \
|
||||
--share-name ${azurerm_storage_share.backend_storage_share.name} \
|
||||
--source ../auth_config.json \
|
||||
--path auth_config.json
|
||||
EOT
|
||||
}
|
||||
|
||||
depends_on = [azurerm_storage_share.backend_storage_share]
|
||||
}
|
||||
|
||||
locals {
|
||||
# Get the current timestamp (UTC)
|
||||
current_timestamp = timestamp()
|
||||
start_time = local.current_timestamp
|
||||
expiry_time = timeadd(local.current_timestamp, "1h")
|
||||
}
|
||||
|
||||
data "azurerm_storage_account_sas" "storage_access" {
|
||||
connection_string = azurerm_storage_account.storage_account.primary_connection_string
|
||||
signed_version = "2022-11-02"
|
||||
|
||||
services {
|
||||
file = true
|
||||
blob = false
|
||||
queue = false
|
||||
table = false
|
||||
}
|
||||
|
||||
resource_types {
|
||||
object = true
|
||||
container = false
|
||||
service = false
|
||||
}
|
||||
|
||||
permissions {
|
||||
read = true
|
||||
write = false
|
||||
delete = false
|
||||
list = true
|
||||
add = false
|
||||
create = false
|
||||
update = false
|
||||
process = false
|
||||
tag = false
|
||||
filter = false
|
||||
}
|
||||
|
||||
start = local.start_time
|
||||
expiry = local.expiry_time
|
||||
|
||||
depends_on = [null_resource.upload_file]
|
||||
}
|
||||
7
opentofu/azure/terraform.tfvars.example
Normal file
7
opentofu/azure/terraform.tfvars.example
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
subscription_id = "subscription_id"
|
||||
backend_session_secret = "secret"
|
||||
backend_email_sender = "mail@mail.com"
|
||||
backend_email_password = "password"
|
||||
backend_jwt_secret = "jwt_secret"
|
||||
vm_user = "username"
|
||||
vm_password = "password"
|
||||
214
opentofu/azure/variables.tf
Normal file
214
opentofu/azure/variables.tf
Normal file
|
|
@ -0,0 +1,214 @@
|
|||
variable "subscription_id" {
|
||||
description = "The azure subscription id"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "resource_group_name" {
|
||||
description = "The name of the resource group"
|
||||
type = string
|
||||
default = "evaluetonsavoir"
|
||||
}
|
||||
|
||||
variable "location" {
|
||||
description = "The location for resources"
|
||||
type = string
|
||||
default = "Canada Central"
|
||||
}
|
||||
|
||||
variable "frontend_port" {
|
||||
description = "The frontend port"
|
||||
type = number
|
||||
default = 5173
|
||||
}
|
||||
|
||||
variable "backend_port" {
|
||||
description = "The backend port"
|
||||
type = number
|
||||
default = 3000
|
||||
}
|
||||
|
||||
variable "backend_use_port" {
|
||||
description = "If true use port in the backend, else no"
|
||||
type = bool
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "backend_use_auth_student" {
|
||||
description = "If true student need to authenticate, else no"
|
||||
type = bool
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "backend_session_secret" {
|
||||
description = "The backend session secret"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "backend_email_service" {
|
||||
description = "The name of the service use for sending email"
|
||||
type = string
|
||||
default = "gmail"
|
||||
}
|
||||
|
||||
variable "backend_email_sender" {
|
||||
description = "The email address used to send email"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "backend_email_password" {
|
||||
description = "The email password"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "backend_jwt_secret" {
|
||||
description = "The secret used to sign the jwt"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "backend_storage_share_name" {
|
||||
description = "The backend volume share name"
|
||||
type = string
|
||||
default = "auth-config-share"
|
||||
}
|
||||
|
||||
variable "config_volume_storage_account_name" {
|
||||
description = "The volume storage account name"
|
||||
type = string
|
||||
default = "evaluetonsavoirstorage"
|
||||
}
|
||||
|
||||
variable "mongo_database_name" {
|
||||
description = "The name of the database"
|
||||
type = string
|
||||
default = "evaluetonsavoir"
|
||||
}
|
||||
|
||||
variable "cosmosdb_account_name" {
|
||||
description = "The name of the cosmosdb account"
|
||||
type = string
|
||||
default = "evaluetonsavoircosmosdb"
|
||||
}
|
||||
|
||||
variable "vnet_name" {
|
||||
description = "The name of the virtual network"
|
||||
type = string
|
||||
default = "evaluetonsavoirVnet"
|
||||
}
|
||||
|
||||
variable "subnet_name" {
|
||||
description = "The name of the subnet"
|
||||
type = string
|
||||
default = "evaluetonsavoirSubnet"
|
||||
}
|
||||
|
||||
variable "public_ip_name" {
|
||||
description = "The name of the public ip"
|
||||
type = string
|
||||
default = "evaluetonsavoirPublicIp"
|
||||
}
|
||||
|
||||
variable "nsg_name" {
|
||||
description = "The name of the network security group"
|
||||
type = string
|
||||
default = "evaluetonsavoirnsg"
|
||||
}
|
||||
|
||||
variable "nsg_ssh_ip_range" {
|
||||
description = "The ip range that can access to the port 22 using the network security group"
|
||||
type = string
|
||||
default = "0.0.0.0/0"
|
||||
}
|
||||
|
||||
variable "nsg_http_ip_range" {
|
||||
description = "The ip range that can access to the port 80 using the network security group"
|
||||
type = string
|
||||
default = "0.0.0.0/0"
|
||||
}
|
||||
|
||||
variable "nsg_https_ip_range" {
|
||||
description = "The ip range that can access to the port 443 using the network security group"
|
||||
type = string
|
||||
default = "0.0.0.0/0"
|
||||
}
|
||||
|
||||
variable "network_interface_name" {
|
||||
description = "The name of the network interface"
|
||||
type = string
|
||||
default = "evaluetonsavoirNetworkInterface"
|
||||
}
|
||||
|
||||
variable "dns" {
|
||||
description = "The dns of the public ip"
|
||||
type = string
|
||||
default = "evaluetonsavoir"
|
||||
}
|
||||
|
||||
variable "vm_name" {
|
||||
description = "The name of the virtual machine"
|
||||
type = string
|
||||
default = "evaluetonsavoir"
|
||||
}
|
||||
|
||||
variable "vm_size" {
|
||||
description = "The size of the virtual machine"
|
||||
type = string
|
||||
default = "Standard_B2s"
|
||||
}
|
||||
|
||||
variable "vm_user" {
|
||||
description = "The username of the virtual machine"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "vm_password" {
|
||||
description = "The password of the virtual machine"
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "vm_os_disk_name" {
|
||||
description = "The name of the os disk of the virtual machine"
|
||||
type = string
|
||||
default = "evaluetonsavoirOsDisk"
|
||||
}
|
||||
|
||||
variable "vm_os_disk_type" {
|
||||
description = "The type of the os disk of the virtual machine"
|
||||
type = string
|
||||
default = "Standard_LRS"
|
||||
}
|
||||
|
||||
variable "vm_image_publisher" {
|
||||
description = "The publisher of the image of the virtual machine"
|
||||
type = string
|
||||
default = "Canonical"
|
||||
}
|
||||
|
||||
variable "vm_image_offer" {
|
||||
description = "The id of the image of the virtual machine"
|
||||
type = string
|
||||
default = "0001-com-ubuntu-server-jammy"
|
||||
}
|
||||
|
||||
variable "vm_image_plan" {
|
||||
description = "The plan of the image of the virtual machine"
|
||||
type = string
|
||||
default = "22_04-lts"
|
||||
}
|
||||
|
||||
variable "vm_image_version" {
|
||||
description = "The version of the image of the virtual machine"
|
||||
type = string
|
||||
default = "latest"
|
||||
}
|
||||
|
||||
variable "docker_compose_url" {
|
||||
description = "The url from where the docker compose file is downloaded"
|
||||
type = string
|
||||
default = "https://raw.githubusercontent.com/ets-cfuhrman-pfe/EvalueTonSavoir/refs/heads/main/opentofu/docker-compose.yaml"
|
||||
}
|
||||
|
||||
variable "quizroom_image" {
|
||||
description = "The image of the quiz room"
|
||||
type = string
|
||||
default = "ghrc.io/fuhrmanator/evaluetonsavoir-quizroom:latest"
|
||||
}
|
||||
80
opentofu/docker-compose.yaml
Normal file
80
opentofu/docker-compose.yaml
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
services:
|
||||
|
||||
frontend:
|
||||
image: ghcr.io/ets-cfuhrman-pfe/evaluetonsavoir-frontend:latest
|
||||
container_name: frontend
|
||||
ports:
|
||||
- "5173:5173"
|
||||
environment:
|
||||
VITE_BACKEND_URL: ${VITE_BACKEND_URL:-http://localhost:3000}
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
|
||||
backend:
|
||||
image: ghcr.io/ets-cfuhrman-pfe/evaluetonsavoir-backend:latest
|
||||
container_name: backend
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
PORT: ${PORT:-3000}
|
||||
MONGO_URI: ${MONGO_URI:-mongodb://mongo:27017/evaluetonsavoir}
|
||||
MONGO_DATABASE: ${MONGO_DATABASE:-evaluetonsavoir}
|
||||
EMAIL_SERVICE: ${EMAIL_SERVICE:-gmail}
|
||||
SENDER_EMAIL: ${SENDER_EMAIL:-infoevaluetonsavoir@gmail.com}
|
||||
EMAIL_PSW: ${EMAIL_PSW:-'vvml wmfr dkzb vjzb'}
|
||||
JWT_SECRET: ${JWT_SECRET:-haQdgd2jp09qb897GeBZyJetC8ECSpbFJe}
|
||||
FRONTEND_URL: ${FRONTEND_URL:-http://localhost:5173}
|
||||
SESSION_Secret: ${SESSION_Secret:-'lookMomImQuizzing'}
|
||||
SITE_URL: ${SITE_URL:-http://localhost}
|
||||
FRONTEND_PORT: ${FRONTEND_PORT:-5173}
|
||||
USE_PORTS: ${USE_PORTS:-false}
|
||||
AUTHENTICATED_ROOMS: ${AUTHENTICATED_ROOMS:-false}
|
||||
QUIZROOM_IMAGE: ${QUIZROOM_IMAGE:-ghcr.io/fuhrmanator/evaluetonsavoir-quizroom:latest}
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- ./auth_config.json:/usr/src/app/serveur/auth_config.json
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
|
||||
quizroom:
|
||||
image: ghcr.io/ets-cfuhrman-pfe/evaluetonsavoir-quizroom:latest
|
||||
container_name: quizroom
|
||||
ports:
|
||||
- "4500:4500"
|
||||
depends_on:
|
||||
- backend
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
|
||||
nginx:
|
||||
image: ghcr.io/ets-cfuhrman-pfe/evaluetonsavoir-router:latest
|
||||
container_name: nginx
|
||||
ports:
|
||||
- "80:80"
|
||||
depends_on:
|
||||
- backend
|
||||
- frontend
|
||||
networks:
|
||||
- quiz_network
|
||||
restart: always
|
||||
|
||||
watchtower:
|
||||
image: containrrr/watchtower
|
||||
container_name: watchtower
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
environment:
|
||||
- TZ=America/Montreal
|
||||
- WATCHTOWER_CLEANUP=true
|
||||
- WATCHTOWER_DEBUG=true
|
||||
- WATCHTOWER_INCLUDE_RESTARTING=true
|
||||
- WATCHTOWER_SCHEDULE=0 0 5 * * * # At 5 am everyday
|
||||
restart: always
|
||||
|
||||
networks:
|
||||
quiz_network:
|
||||
name: evaluetonsavoir_quiz_network
|
||||
driver: bridge
|
||||
2 quizRoom/.dockerignore Normal file
@ -0,0 +1,2 @@
Dockerfile
docker-compose.yml
32 quizRoom/Dockerfile Normal file
@ -0,0 +1,32 @@
# Use the Node base image
FROM node:18 AS quizroom

ENV PORT=4500
ENV ROOM_ID=000000

# Create a working directory
WORKDIR /usr/src/app

# Copy package.json and package-lock.json (if available) and install dependencies
COPY package*.json ./
RUN npm install

# Copy the rest of the source code to the container
COPY . .

# Ensure healthcheck.sh has execution permissions
COPY healthcheck.sh /usr/src/app/healthcheck.sh
RUN chmod +x /usr/src/app/healthcheck.sh

# Build the TypeScript code
RUN npm run build

# Expose WebSocket server port
EXPOSE ${PORT}

# Add healthcheck
HEALTHCHECK --interval=30s --timeout=30s --start-period=30s --retries=3 \
    CMD /usr/src/app/healthcheck.sh

# Start the server using the compiled JavaScript file
CMD ["node", "dist/app.js"]
57
quizRoom/app.ts
Normal file
57
quizRoom/app.ts
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
import http from "http";
|
||||
import { Server, ServerOptions } from "socket.io";
|
||||
import { setupWebsocket } from "./socket/setupWebSocket";
|
||||
import dotenv from "dotenv";
|
||||
import express from "express";
|
||||
import os from "os"; // Import the os module
|
||||
|
||||
// Load environment variables
|
||||
dotenv.config();
|
||||
|
||||
const port = process.env.PORT || 4500;
|
||||
const roomId = process.env.ROOM_ID;
|
||||
console.log(`I am: /api/room/${roomId}/socket`);
|
||||
|
||||
// Create Express app for health check
|
||||
const app = express();
|
||||
const server = http.createServer(app);
|
||||
|
||||
// Health check endpoint
|
||||
app.get('/health', (_, res) => {
|
||||
try {
|
||||
if (io.engine?.clientsCount !== undefined) {
|
||||
res.status(200).json({
|
||||
status: 'healthy',
|
||||
path: `/api/room/${roomId}/socket`,
|
||||
connections: io.engine.clientsCount,
|
||||
uptime: process.uptime()
|
||||
});
|
||||
} else {
|
||||
throw new Error('Socket.io server not initialized');
|
||||
}
|
||||
} catch (error: any) { // TypeScript only accepts 'any' or 'unknown' as a catch-clause annotation
|
||||
res.status(500).json({
|
||||
status: 'unhealthy',
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
const ioOptions: Partial<ServerOptions> = {
|
||||
path: `/api/room/${roomId}/socket`,
|
||||
cors: {
|
||||
origin: "*",
|
||||
methods: ["GET", "POST"],
|
||||
credentials: true,
|
||||
},
|
||||
};
|
||||
|
||||
const io = new Server(server, ioOptions);
|
||||
|
||||
// Initialize WebSocket setup
|
||||
setupWebsocket(io);
|
||||
|
||||
server.listen(port, () => {
|
||||
console.log(`WebSocket server is running on port ${port}`);
|
||||
});
|
||||
21
quizRoom/docker-compose.yml
Normal file
21
quizRoom/docker-compose.yml
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
version: '3.8'
|
||||
|
||||
services:
|
||||
quizroom:
|
||||
build:
|
||||
context: .
|
||||
args:
|
||||
- PORT=${PORT:-4500}
|
||||
ports:
|
||||
- "${PORT:-4500}:${PORT:-4500}"
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
environment:
|
||||
- PORT=${PORT:-4500}
|
||||
- ROOM_ID=${ROOM_ID}
|
||||
healthcheck:
|
||||
test: curl -f http://localhost:${PORT:-4500}/health || exit 1
|
||||
interval: 30s
|
||||
timeout: 30s
|
||||
retries: 3
|
||||
start_period: 30s
|
||||
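A room container is started with its room code and port passed in as environment variables. A sketch of launching one locally with this compose file (the `ROOM_ID` value is only an example):

```sh
cd quizRoom
ROOM_ID=123456 PORT=4500 docker compose up --build
# The healthcheck then polls http://localhost:4500/health
```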
2 quizRoom/healthcheck.sh Normal file
@ -0,0 +1,2 @@
#!/bin/bash
curl -f "http://0.0.0.0:${PORT}/health" || exit 1
1595 quizRoom/package-lock.json generated Normal file
(File diff suppressed because it is too large)
27 quizRoom/package.json Normal file
@ -0,0 +1,27 @@
{
  "name": "quizroom",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "start": "node dist/app.js",
    "build": "tsc",
    "dev": "ts-node app.ts"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "description": "",
  "devDependencies": {
    "@types/dockerode": "^3.3.32",
    "@types/express": "^5.0.0",
    "ts-node": "^10.9.2",
    "typescript": "^5.6.3"
  },
  "dependencies": {
    "dockerode": "^4.0.2",
    "dotenv": "^16.4.5",
    "express": "^4.21.1",
    "http": "^0.0.1-security",
    "socket.io": "^4.8.1"
  }
}
2 quizRoom/socket/.env.example Normal file
@ -0,0 +1,2 @@
ROOM_ID=123456
PORT=4500
242
quizRoom/socket/setupWebSocket.ts
Normal file
242
quizRoom/socket/setupWebSocket.ts
Normal file
|
|
@ -0,0 +1,242 @@
|
|||
import { Server, Socket } from "socket.io";
|
||||
import Docker from 'dockerode';
|
||||
import fs from 'fs';
|
||||
|
||||
const MAX_USERS_PER_ROOM = 60;
|
||||
const MAX_TOTAL_CONNECTIONS = 2000;
|
||||
|
||||
export const setupWebsocket = (io: Server): void => {
|
||||
let totalConnections = 0;
|
||||
|
||||
io.on("connection", (socket: Socket) => {
|
||||
if (totalConnections >= MAX_TOTAL_CONNECTIONS) {
|
||||
console.log("Connection limit reached. Disconnecting client.");
|
||||
socket.emit("join-failure", "Le nombre maximum de connexions a été atteint");
|
||||
socket.disconnect(true);
|
||||
return;
|
||||
}
|
||||
|
||||
totalConnections++;
|
||||
console.log("A user connected:", socket.id, "| Total connections:", totalConnections);
|
||||
|
||||
socket.on("create-room", (sentRoomName) => {
|
||||
// Ensure sentRoomName is a string before applying toUpperCase()
|
||||
const roomName = (typeof sentRoomName === "string" && sentRoomName.trim() !== "")
|
||||
? sentRoomName.toUpperCase()
|
||||
: generateRoomName();
|
||||
|
||||
console.log(`Created room with name: ${roomName}`);
|
||||
if (!io.sockets.adapter.rooms.get(roomName)) {
|
||||
socket.join(roomName);
|
||||
socket.emit("create-success", roomName);
|
||||
} else {
|
||||
socket.emit("create-failure");
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("join-room", ({ enteredRoomName, username }: { enteredRoomName: string; username: string }) => {
|
||||
if (io.sockets.adapter.rooms.has(enteredRoomName)) {
|
||||
const clientsInRoom = io.sockets.adapter.rooms.get(enteredRoomName)?.size || 0;
|
||||
|
||||
if (clientsInRoom <= MAX_USERS_PER_ROOM) {
|
||||
socket.join(enteredRoomName);
|
||||
socket.to(enteredRoomName).emit("user-joined", { id: socket.id, name: username, answers: [] });
|
||||
socket.emit("join-success");
|
||||
} else {
|
||||
socket.emit("join-failure", "La salle est remplie");
|
||||
}
|
||||
} else {
|
||||
socket.emit("join-failure", "Le nom de la salle n'existe pas");
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("next-question", ({ roomName, question }: { roomName: string; question: string }) => {
|
||||
socket.to(roomName).emit("next-question", question);
|
||||
});
|
||||
|
||||
socket.on("launch-student-mode", ({ roomName, questions }: { roomName: string; questions: string[] }) => {
|
||||
socket.to(roomName).emit("launch-student-mode", questions);
|
||||
});
|
||||
|
||||
socket.on("end-quiz", ({ roomName }: { roomName: string }) => {
|
||||
socket.to(roomName).emit("end-quiz");
|
||||
});
|
||||
|
||||
socket.on("message", (data: string) => {
|
||||
console.log("Received message from", socket.id, ":", data);
|
||||
});
|
||||
|
||||
socket.on("disconnect", () => {
|
||||
totalConnections--;
|
||||
console.log("A user disconnected:", socket.id, "| Total connections:", totalConnections);
|
||||
|
||||
for (const [room] of io.sockets.adapter.rooms) {
|
||||
if (room !== socket.id) {
|
||||
io.to(room).emit("user-disconnected", socket.id);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("submit-answer", ({
|
||||
roomName,
|
||||
username,
|
||||
answer,
|
||||
idQuestion,
|
||||
}: {
|
||||
roomName: string;
|
||||
username: string;
|
||||
answer: string;
|
||||
idQuestion: string;
|
||||
}) => {
|
||||
socket.to(roomName).emit("submit-answer-room", {
|
||||
idUser: socket.id,
|
||||
username,
|
||||
answer,
|
||||
idQuestion,
|
||||
});
|
||||
});
|
||||
|
||||
socket.on("error", (error) => {
|
||||
console.error("WebSocket server error:", error);
|
||||
});
|
||||
|
||||
|
||||
// Stress Testing
|
||||
|
||||
socket.on("message-from-teacher", ({ roomName, message }: { roomName: string; message: string }) => {
|
||||
console.log(`Message reçu dans la salle ${roomName} : ${message}`);
|
||||
socket.to(roomName).emit("message-sent-teacher", { message });
|
||||
});
|
||||
|
||||
socket.on("message-from-student", ({ roomName, message }: { roomName: string; message: string }) => {
|
||||
console.log(`Message reçu dans la salle ${roomName} : ${message}`);
|
||||
socket.to(roomName).emit("message-sent-student", { message });
|
||||
});
|
||||
|
||||
interface ContainerStats {
|
||||
containerId: string;
|
||||
containerName: string;
|
||||
memoryUsedMB: number | null;
|
||||
memoryUsedPercentage: number | null;
|
||||
cpuUsedPercentage: number | null;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
class ContainerMetrics {
|
||||
private docker: Docker;
|
||||
private containerName: string;
|
||||
|
||||
private bytesToMB(bytes: number): number {
|
||||
return Math.round(bytes / (1024 * 1024));
|
||||
}
|
||||
|
||||
constructor() {
|
||||
this.docker = new Docker({
|
||||
socketPath: process.platform === 'win32' ? '//./pipe/docker_engine' : '/var/run/docker.sock'
|
||||
});
|
||||
this.containerName = `room_${process.env.ROOM_ID}`;
|
||||
}
|
||||
|
||||
private async getContainerNetworks(containerId: string): Promise<string[]> {
|
||||
const container = this.docker.getContainer(containerId);
|
||||
const info = await container.inspect();
|
||||
return Object.keys(info.NetworkSettings.Networks);
|
||||
}
|
||||
|
||||
public async getAllContainerStats(): Promise<ContainerStats[]> {
|
||||
try {
|
||||
// First get our container to find its networks
|
||||
const ourContainer = await this.docker.listContainers({
|
||||
all: true,
|
||||
filters: { name: [this.containerName] }
|
||||
});
|
||||
|
||||
if (!ourContainer.length) {
|
||||
throw new Error(`Container ${this.containerName} not found`);
|
||||
}
|
||||
|
||||
const ourNetworks = await this.getContainerNetworks(ourContainer[0].Id);
|
||||
|
||||
// Get all containers
|
||||
const allContainers = await this.docker.listContainers();
|
||||
|
||||
// Get stats for containers on the same networks
|
||||
const containerStats = await Promise.all(
|
||||
allContainers.map(async (container): Promise<ContainerStats | null> => {
|
||||
try {
|
||||
const containerNetworks = await this.getContainerNetworks(container.Id);
|
||||
// Check if container shares any network with our container
|
||||
if (!containerNetworks.some(network => ourNetworks.includes(network))) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const stats = await this.docker.getContainer(container.Id).stats({ stream: false });
|
||||
|
||||
const memoryStats = {
|
||||
usage: stats.memory_stats.usage,
|
||||
limit: stats.memory_stats.limit || 0,
|
||||
percent: stats.memory_stats.limit ? (stats.memory_stats.usage / stats.memory_stats.limit) * 100 : 0
|
||||
};
|
||||
|
||||
const cpuDelta = stats.cpu_stats?.cpu_usage?.total_usage - (stats.precpu_stats?.cpu_usage?.total_usage || 0);
|
||||
const systemDelta = stats.cpu_stats?.system_cpu_usage - (stats.precpu_stats?.system_cpu_usage || 0);
|
||||
const cpuPercent = systemDelta > 0 ? (cpuDelta / systemDelta) * (stats.cpu_stats?.online_cpus || 1) * 100 : 0;
|
||||
|
||||
return {
|
||||
containerId: container.Id,
|
||||
containerName: container.Names[0].replace(/^\//, ''),
|
||||
memoryUsedMB: this.bytesToMB(memoryStats.usage),
|
||||
memoryUsedPercentage: memoryStats.percent,
|
||||
cpuUsedPercentage: cpuPercent
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
containerId: container.Id,
|
||||
containerName: container.Names[0].replace(/^\//, ''),
|
||||
memoryUsedMB: null,
|
||||
memoryUsedPercentage: null,
|
||||
cpuUsedPercentage: null,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
};
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
// Change the filter to use proper type predicate
|
||||
return containerStats.filter((stats): stats is ContainerStats => stats !== null);
|
||||
} catch (error) {
|
||||
console.error('Stats error:', error);
|
||||
return [{
|
||||
containerId: 'unknown',
|
||||
containerName: 'unknown',
|
||||
memoryUsedMB: null,
|
||||
memoryUsedPercentage: null,
|
||||
cpuUsedPercentage: null,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
}];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const containerMetrics = new ContainerMetrics();
|
||||
|
||||
socket.on("get-usage", async () => {
|
||||
try {
|
||||
const usageData = await containerMetrics.getAllContainerStats();
|
||||
socket.emit("usage-data", usageData);
|
||||
} catch (error) {
|
||||
socket.emit("error", { message: "Failed to retrieve usage data" });
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
const generateRoomName = (length = 6): string => {
|
||||
const characters = "0123456789";
|
||||
let result = "";
|
||||
for (let i = 0; i < length; i++) {
|
||||
result += characters.charAt(Math.floor(Math.random() * characters.length));
|
||||
}
|
||||
return result;
|
||||
};
|
||||
};
|
||||
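For reference, this is how a client could talk to the room server defined above. It is a sketch using `socket.io-client`, which is not a dependency declared in quizRoom/package.json; the room code and host are placeholders, and the Socket.IO `path` matches the one configured in quizRoom/app.ts.

```typescript
import { io } from "socket.io-client";

const roomId = "123456"; // hypothetical room code
// The nginx router resolves /api/room/<id>/socket to the matching room container
const socket = io("http://localhost", { path: `/api/room/${roomId}/socket` });

socket.on("connect", () => {
  // A teacher would emit "create-room"; a student joins an existing room
  socket.emit("join-room", { enteredRoomName: roomId, username: "alice" });
});

socket.on("join-success", () => {
  // Answer payload shape mirrors the "submit-answer" handler in setupWebSocket.ts
  socket.emit("submit-answer", {
    roomName: roomId,
    username: "alice",
    answer: "B",
    idQuestion: "q1",
  });
});

socket.on("join-failure", (reason: string) => console.error("join failed:", reason));
socket.on("next-question", (question: string) => console.log("next question:", question));
```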
14 quizRoom/tsconfig.json Normal file
@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "target": "ES6",
    "module": "commonjs",
    "outDir": "./dist",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true
  },
  "include": ["./**/*"],
  "exclude": ["node_modules"]
}
@ -8,6 +8,10 @@ RUN npm install

COPY ./ .

EXPOSE 4400
ENV PORT=3000
EXPOSE ${PORT}

HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:${PORT}/health || exit 1

CMD ["npm", "run", "start"]
8 server/__mocks__/AppError.js Normal file
@ -0,0 +1,8 @@
class AppError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.statusCode = statusCode;
    }
}

module.exports = AppError;

6 server/__mocks__/bcrypt.js Normal file
@ -0,0 +1,6 @@
const mockBcrypt = {
    hash: jest.fn().mockResolvedValue('hashedPassword'),
    compare: jest.fn().mockResolvedValue(true),
};

module.exports = mockBcrypt;

20 server/__mocks__/db.js Normal file
@ -0,0 +1,20 @@
class MockDBConnection {
    constructor() {
        this.db = jest.fn().mockReturnThis();
        this.collection = jest.fn().mockReturnThis();
        this.insertOne = jest.fn();
        this.findOne = jest.fn();
        this.updateOne = jest.fn();
        this.deleteOne = jest.fn();
    }

    async connect() {
        // Simulate successful connection
    }

    getConnection() {
        return this;
    }
}

module.exports = MockDBConnection;

8 server/__mocks__/folders.js Normal file
@ -0,0 +1,8 @@
const mockFolders = {
    create: jest.fn(),
    find: jest.fn(),
    update: jest.fn(),
    delete: jest.fn(),
};

module.exports = mockFolders;
441
server/__tests__/folders.test.js
Normal file
441
server/__tests__/folders.test.js
Normal file
|
|
@ -0,0 +1,441 @@
|
|||
const { create } = require('../middleware/jwtToken');
|
||||
const Folders = require('../models/folders');
|
||||
const ObjectId = require('mongodb').ObjectId;
|
||||
const Quizzes = require('../models/quiz');
|
||||
|
||||
describe('Folders', () => {
|
||||
let folders;
|
||||
let db;
|
||||
let collection;
|
||||
let quizzes;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks(); // Clear any previous mock calls
|
||||
|
||||
// Mock the collection object
|
||||
collection = {
|
||||
findOne: jest.fn(),
|
||||
insertOne: jest.fn(),
|
||||
find: jest.fn().mockReturnValue({ toArray: jest.fn() }), // Mock the find method
|
||||
deleteOne: jest.fn(),
|
||||
deleteMany: jest.fn(),
|
||||
updateOne: jest.fn(),
|
||||
};
|
||||
|
||||
// Mock the database connection
|
||||
db = {
|
||||
connect: jest.fn(),
|
||||
getConnection: jest.fn().mockReturnThis(), // Add getConnection method
|
||||
collection: jest.fn().mockReturnValue(collection),
|
||||
};
|
||||
|
||||
quizzes = new Quizzes(db);
|
||||
folders = new Folders(db, quizzes);
|
||||
|
||||
});
|
||||
|
||||
// create
|
||||
describe('create', () => {
|
||||
it('should create a new folder and return the new folder ID', async () => {
|
||||
const title = 'Test Folder';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue(null);
|
||||
collection.insertOne.mockResolvedValue({ insertedId: new ObjectId() });
|
||||
|
||||
const result = await folders.create(title, '12345');
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ title, userId: '12345' });
|
||||
expect(collection.insertOne).toHaveBeenCalledWith(expect.objectContaining({ title, userId: '12345' }));
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
// throw an error if userId is undefined
|
||||
it('should throw an error if userId is undefined', async () => {
|
||||
const title = 'Test Folder';
|
||||
|
||||
await expect(folders.create(title, undefined)).rejects.toThrow('Missing required parameter(s)');
|
||||
|
||||
expect(db.connect).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should throw an error if the folder already exists', async () => {
|
||||
const title = 'Existing Folder';
|
||||
const userId = '66fc70bea1b9e87655cf17c9';
|
||||
|
||||
// Mock the database response of a found folder
|
||||
collection.findOne.mockResolvedValue(
|
||||
// real result from mongosh
|
||||
{
|
||||
_id: ObjectId.createFromHexString('66fd33fd81758a882ce99aae'),
|
||||
userId: userId,
|
||||
title: title,
|
||||
created_at: new Date('2024-10-02T11:52:29.797Z')
|
||||
}
|
||||
);
|
||||
|
||||
await expect(folders.create(title, userId)).rejects.toThrow('Folder already exists');
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ title, userId: userId });
|
||||
});
|
||||
});
|
||||
|
||||
// getUserFolders
|
||||
describe('getUserFolders', () => {
|
||||
it('should return all folders for a user', async () => {
|
||||
const userId = '12345';
|
||||
const userFolders = [
|
||||
{ title: 'Folder 1', userId },
|
||||
{ title: 'Folder 2', userId },
|
||||
];
|
||||
|
||||
// Mock the database response
|
||||
collection.find().toArray.mockResolvedValue(userFolders);
|
||||
|
||||
const result = await folders.getUserFolders(userId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.find).toHaveBeenCalledWith({ userId });
|
||||
expect(result).toEqual(userFolders);
|
||||
});
|
||||
});
|
||||
|
||||
// getOwner
|
||||
describe('getOwner', () => {
|
||||
it('should return the owner of a folder', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue({ userId });
|
||||
|
||||
const result = await folders.getOwner(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
expect(result).toBe(userId);
|
||||
});
|
||||
});
|
||||
|
||||
// write a test for getContent
|
||||
describe('getContent', () => {
|
||||
it('should return the content of a folder', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const content = [
|
||||
{ title: 'Quiz 1', content: [] },
|
||||
{ title: 'Quiz 2', content: [] },
|
||||
];
|
||||
|
||||
// Mock the database response
|
||||
collection.find().toArray.mockResolvedValue(content);
|
||||
|
||||
const result = await folders.getContent(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('files');
|
||||
expect(collection.find).toHaveBeenCalledWith({ folderId });
|
||||
expect(result).toEqual(content);
|
||||
});
|
||||
|
||||
it('should return an empty array if the folder has no content', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.find().toArray.mockResolvedValue([]);
|
||||
|
||||
const result = await folders.getContent(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('files');
|
||||
expect(collection.find).toHaveBeenCalledWith({ folderId });
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
// delete
|
||||
describe('delete', () => {
|
||||
it('should delete a folder and return true', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.deleteOne.mockResolvedValue({ deletedCount: 1 });
|
||||
|
||||
|
||||
// Mock the folders.quizModel.deleteQuizzesByFolderId()
|
||||
jest.spyOn(quizzes, 'deleteQuizzesByFolderId').mockResolvedValue(true);
|
||||
|
||||
const result = await folders.delete(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.deleteOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if the folder does not exist', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.deleteOne.mockResolvedValue({ deletedCount: 0 });
|
||||
|
||||
const result = await folders.delete(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.deleteOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// rename
|
||||
describe('rename', () => {
|
||||
it('should rename a folder and return true', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const newTitle = 'New Folder Name';
|
||||
|
||||
// Mock the database response
|
||||
collection.updateOne.mockResolvedValue({ modifiedCount: 1 });
|
||||
|
||||
const result = await folders.rename(folderId, newTitle);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.updateOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) }, { $set: { title: newTitle } });
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if the folder does not exist', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const newTitle = 'New Folder Name';
|
||||
|
||||
// Mock the database response
|
||||
collection.updateOne.mockResolvedValue({ modifiedCount: 0 });
|
||||
|
||||
const result = await folders.rename(folderId, newTitle);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.updateOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) }, { $set: { title: newTitle } });
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// duplicate
|
||||
describe('duplicate', () => {
|
||||
it('should duplicate a folder and return the new folder ID', async () => {
|
||||
const userId = '12345';
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const sourceFolder = {title: 'SourceFolder', userId: userId, content: []};
|
||||
const duplicatedFolder = {title: 'SourceFolder (1)', userId: userId, created_at: expect.any(Date), content: []};
|
||||
|
||||
// Mock the database responses for the folder and the new folder (first one is found, second one is null)
|
||||
// mock the findOne method
|
||||
jest.spyOn(collection, 'findOne')
|
||||
.mockResolvedValueOnce(sourceFolder) // source file exists
|
||||
.mockResolvedValueOnce(null); // new name is not found
|
||||
|
||||
// Mock the folder create method
|
||||
const createSpy = jest.spyOn(folders, 'create').mockResolvedValue(new ObjectId());
|
||||
|
||||
// mock the folder.getContent method
|
||||
jest.spyOn(folders, 'getContent').mockResolvedValue([{ title: 'Quiz 1', content: [] }]);
|
||||
|
||||
// Mock the quizzes.create method
|
||||
jest.spyOn(quizzes, 'create').mockResolvedValue(new ObjectId());
|
||||
|
||||
const result = await folders.duplicate(folderId, userId);
|
||||
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
|
||||
// expect folders.create method was called
|
||||
expect(createSpy).toHaveBeenCalledWith(duplicatedFolder.title, userId);
|
||||
// expect the getContent method was called
|
||||
expect(folders.getContent).toHaveBeenCalledWith(folderId);
|
||||
// expect the quizzes.create method was called
|
||||
expect(quizzes.create).toHaveBeenCalledWith('Quiz 1', [], expect.any(String), userId);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
it('should throw an error if the folder does not exist', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response for the source
|
||||
collection.findOne.mockResolvedValue(null);
|
||||
|
||||
await expect(folders.duplicate(folderId, '54321')).rejects.toThrow(`Folder ${folderId} not found`);
|
||||
|
||||
// expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId), userId: '54321' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('folderExists', () => {
|
||||
it('should return true if folder exists', async () => {
|
||||
const title = 'Test Folder';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue({ title, userId });
|
||||
|
||||
const result = await folders.folderExists(title, userId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ title, userId });
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if folder does not exist', async () => {
|
||||
const title = 'Nonexistent Folder';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue(null);
|
||||
|
||||
const result = await folders.folderExists(title, userId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ title, userId });
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('copy', () => {
|
||||
it('should copy a folder and return the new folder ID', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
const newFolderId = new ObjectId();
|
||||
// Mock some quizzes that are in folder.content
|
||||
const sourceFolder = {
|
||||
title: 'Test Folder',
|
||||
content: [
|
||||
{ title: 'Quiz 1', content: [] },
|
||||
{ title: 'Quiz 2', content: [] },
|
||||
],
|
||||
};
|
||||
|
||||
// Mock the response from getFolderWithContent
|
||||
jest.spyOn(folders, 'getFolderWithContent').mockResolvedValue(sourceFolder);
|
||||
jest.spyOn(folders, 'create').mockResolvedValue(newFolderId);
|
||||
// Mock the response from Quiz.createQuiz
|
||||
jest.spyOn(quizzes, 'create').mockImplementation(() => {});
|
||||
|
||||
const result = await folders.copy(folderId, userId);
|
||||
|
||||
// expect(db.connect).toHaveBeenCalled();
|
||||
// expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
// expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
// expect(collection.insertOne).toHaveBeenCalledWith(expect.objectContaining({ userId }));
|
||||
expect(result).toBe(newFolderId);
|
||||
});
|
||||
|
||||
it('should throw an error if the folder does not exist', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the response from getFolderWithContent
|
||||
jest.spyOn(folders, 'getFolderWithContent').mockImplementation(() => {
|
||||
throw new Error(`Folder ${folderId} not found`);
|
||||
});
|
||||
|
||||
await expect(folders.copy(folderId, userId)).rejects.toThrow(`Folder ${folderId} not found`);
|
||||
|
||||
// expect(db.connect).toHaveBeenCalled();
|
||||
// expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
// expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
});
|
||||
});
|
||||
|
||||
// write a test for getFolderWithContent
|
||||
describe('getFolderWithContent', () => {
|
||||
it('should return a folder with content', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const folder = {
|
||||
_id: new ObjectId(folderId),
|
||||
title: 'Test Folder',
|
||||
};
|
||||
const content = {
|
||||
content : [
|
||||
{ title: 'Quiz 1', content: [] },
|
||||
{ title: 'Quiz 2', content: [] },
|
||||
]};
|
||||
|
||||
// Mock the response from getFolderById
|
||||
jest.spyOn(folders, 'getFolderById').mockResolvedValue(folder);
|
||||
|
||||
// Mock the response from getContent
|
||||
jest.spyOn(folders, 'getContent').mockResolvedValue(content);
|
||||
|
||||
const result = await folders.getFolderWithContent(folderId);
|
||||
|
||||
// expect(db.connect).toHaveBeenCalled();
|
||||
// expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
// expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
expect(result).toEqual({
|
||||
...folder,
|
||||
content: content
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw an error if the folder does not exist', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// // Mock the database response
|
||||
// collection.findOne.mockResolvedValue(null);
|
||||
|
||||
// Mock getFolderById to throw an error
|
||||
jest.spyOn(folders, 'getFolderById').mockImplementation(() => {
|
||||
throw new Error(`Folder ${folderId} not found`);
|
||||
});
|
||||
|
||||
await expect(folders.getFolderWithContent(folderId)).rejects.toThrow(`Folder ${folderId} not found`);
|
||||
|
||||
// expect(db.connect).toHaveBeenCalled();
|
||||
// expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
// expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
});
|
||||
});
|
||||
|
||||
// write a test for getFolderById
|
||||
describe('getFolderById', () => {
|
||||
it('should return a folder by ID', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const folder = {
|
||||
_id: new ObjectId(folderId),
|
||||
title: 'Test Folder',
|
||||
};
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue(folder);
|
||||
|
||||
const result = await folders.getFolderById(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
expect(result).toEqual(folder);
|
||||
});
|
||||
|
||||
it('should throw an error if the folder does not exist', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue(null);
|
||||
|
||||
await expect(folders.getFolderById(folderId)).rejects.toThrow(`Folder ${folderId} not found`);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection).toHaveBeenCalledWith('folders');
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ _id: new ObjectId(folderId) });
|
||||
});
|
||||
});
|
||||
});
@ -1,11 +1,11 @@
const request = require('supertest');
|
||||
const app = require('../app.js');
|
||||
// const app = require('../routers/images.js');
|
||||
const { response } = require('express');
|
||||
// const request = require('supertest');
|
||||
// const app = require('../app.js');
|
||||
// // const app = require('../routers/images.js');
|
||||
// const { response } = require('express');
|
||||
|
||||
const BASE_URL = '/image'
|
||||
// const BASE_URL = '/image'
|
||||
|
||||
describe("POST /upload", () => {
|
||||
describe.skip("POST /upload", () => {
|
||||
|
||||
describe("when the jwt is not sent", () => {
|
||||
|
||||
|
|
@ -44,7 +44,7 @@ describe("POST /upload", () => {
|
|||
|
||||
})
|
||||
|
||||
describe("GET /get", () => {
|
||||
describe.skip("GET /get", () => {
|
||||
|
||||
describe("when not give id", () => {
|
||||
|
||||
|
|
@ -61,4 +61,4 @@ describe("GET /get", () => {
|
|||
|
||||
})
|
||||
|
||||
})
|
||||
})
|
347
server/__tests__/quizzes.test.js
Normal file
@ -0,0 +1,347 @@
const { ObjectId } = require('mongodb');
|
||||
const Quizzes = require('../models/quiz'); // Adjust the path as necessary
|
||||
|
||||
describe('Quizzes', () => {
|
||||
let db;
|
||||
let quizzes;
|
||||
let collection;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks(); // Clear any previous mock calls
|
||||
|
||||
// Mock the collection object
|
||||
collection = {
|
||||
findOne: jest.fn(),
|
||||
insertOne: jest.fn(),
|
||||
find: jest.fn().mockReturnValue({ toArray: jest.fn() }), // Mock the find method
|
||||
deleteOne: jest.fn(),
|
||||
deleteMany: jest.fn(),
|
||||
updateOne: jest.fn(),
|
||||
getContent: jest.fn(),
|
||||
};
|
||||
|
||||
// Mock the database connection
|
||||
db = {
|
||||
connect: jest.fn(),
|
||||
getConnection: jest.fn().mockReturnValue({
|
||||
collection: jest.fn().mockReturnValue(collection),
|
||||
}),
|
||||
};
|
||||
|
||||
// Initialize the Quiz model with the mocked db
|
||||
quizzes = new Quizzes(db);
|
||||
});
|
||||
|
||||
describe('create', () => {
|
||||
it('should create a new quiz if it does not exist', async () => {
|
||||
const title = 'Test Quiz';
|
||||
const content = 'This is a test quiz.';
|
||||
const folderId = '507f1f77bcf86cd799439011';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue(null);
|
||||
collection.insertOne.mockResolvedValue({ insertedId: new ObjectId() });
|
||||
|
||||
const result = await quizzes.create(title, content, folderId, userId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ title, folderId, userId });
|
||||
expect(collection.insertOne).toHaveBeenCalledWith(expect.objectContaining({
|
||||
folderId,
|
||||
userId,
|
||||
title,
|
||||
content,
|
||||
created_at: expect.any(Date),
|
||||
updated_at: expect.any(Date),
|
||||
}));
|
||||
expect(result).not.toBeNull();
|
||||
});
|
||||
|
||||
it('should throw exception if the quiz already exists', async () => {
|
||||
const title = 'Test Quiz';
|
||||
const content = 'This is a test quiz.';
|
||||
const folderId = '507f1f77bcf86cd799439011';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue({ title });
|
||||
|
||||
await expect(quizzes.create(title, content, folderId, userId)).rejects.toThrow(`Quiz already exists with title: ${title}, folderId: ${folderId}, userId: ${userId}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getOwner', () => {
|
||||
it('should return the owner of the quiz', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue({ userId });
|
||||
|
||||
const result = await quizzes.getOwner(quizId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ _id: ObjectId.createFromHexString(quizId) });
|
||||
expect(result).toBe(userId);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getContent', () => {
|
||||
it('should return the content of the quiz', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const content = 'This is a test quiz.';
|
||||
|
||||
// Mock the database response
|
||||
collection.findOne.mockResolvedValue({ content });
|
||||
|
||||
const result = await quizzes.getContent(quizId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.findOne).toHaveBeenCalledWith({ _id: ObjectId.createFromHexString(quizId) });
|
||||
expect(result).toEqual({ content });
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete the quiz', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.deleteOne.mockResolvedValue({deletedCount: 1});
|
||||
|
||||
await quizzes.delete(quizId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.deleteOne).toHaveBeenCalledWith({ _id: ObjectId.createFromHexString(quizId) });
|
||||
});
|
||||
|
||||
it('should return false if the quiz does not exist', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.deleteOne.mockResolvedValue({deletedCount: 0});
|
||||
|
||||
const result = await quizzes.delete(quizId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.deleteOne).toHaveBeenCalledWith({ _id: ObjectId.createFromHexString(quizId) });
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// deleteQuizzesByFolderId
|
||||
describe('deleteQuizzesByFolderId', () => {
|
||||
it('should delete all quizzes in a folder', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.deleteMany.mockResolvedValue({deletedCount: 2});
|
||||
|
||||
await quizzes.deleteQuizzesByFolderId(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.deleteMany).toHaveBeenCalledWith({ folderId });
|
||||
});
|
||||
|
||||
it('should return false if no quizzes are deleted', async () => {
|
||||
const folderId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
|
||||
// Mock the database response
|
||||
collection.deleteMany.mockResolvedValue({deletedCount: 0});
|
||||
|
||||
const result = await quizzes.deleteQuizzesByFolderId(folderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.deleteMany).toHaveBeenCalledWith({ folderId });
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// update
|
||||
describe('update', () => {
|
||||
it('should update the title and content of the quiz', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const newTitle = 'Updated Quiz';
|
||||
const newContent = 'This is an updated quiz.';
|
||||
|
||||
// Mock the database response
|
||||
collection.updateOne.mockResolvedValue({modifiedCount: 1});
|
||||
|
||||
await quizzes.update(quizId, newTitle, newContent);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.updateOne).toHaveBeenCalledWith(
|
||||
{ _id: ObjectId.createFromHexString(quizId) },
|
||||
{ $set: { title: newTitle, content: newContent, updated_at: expect.any(Date) } }
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false if the quiz does not exist', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const newTitle = 'Updated Quiz';
|
||||
const newContent = 'This is an updated quiz.';
|
||||
|
||||
// Mock the database response
|
||||
collection.updateOne.mockResolvedValue({modifiedCount: 0});
|
||||
|
||||
const result = await quizzes.update(quizId, newTitle, newContent);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.updateOne).toHaveBeenCalledWith(
|
||||
{ _id: ObjectId.createFromHexString(quizId) },
|
||||
{ $set: { title: newTitle, content: newContent, updated_at: expect.any(Date) } }
|
||||
);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// move
|
||||
describe('move', () => {
|
||||
it('should move the quiz to a new folder', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const newFolderId = '507f1f77bcf86cd799439011';
|
||||
|
||||
// Mock the database response
|
||||
collection.updateOne.mockResolvedValue({modifiedCount: 1});
|
||||
|
||||
await quizzes.move(quizId, newFolderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.updateOne).toHaveBeenCalledWith(
|
||||
{ _id: ObjectId.createFromHexString(quizId) },
|
||||
{ $set: { folderId: newFolderId } }
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false if the quiz does not exist', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const newFolderId = '507f1f77bcf86cd799439011';
|
||||
|
||||
// Mock the database response
|
||||
collection.updateOne.mockResolvedValue({modifiedCount: 0});
|
||||
|
||||
const result = await quizzes.move(quizId, newFolderId);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.getConnection).toHaveBeenCalled();
|
||||
expect(collection.updateOne).toHaveBeenCalledWith(
|
||||
{ _id: ObjectId.createFromHexString(quizId) },
|
||||
{ $set: { folderId: newFolderId } }
|
||||
);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// duplicate
|
||||
describe('duplicate', () => {
|
||||
|
||||
it('should duplicate the quiz and return the new quiz ID', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
const newQuizId = ObjectId.createFromTime(Math.floor(Date.now() / 1000)); // Corrected ObjectId creation
|
||||
const sourceQuiz = {
|
||||
title: 'Test Quiz',
|
||||
content: 'This is a test quiz.',
|
||||
};
|
||||
|
||||
const createMock = jest.spyOn(quizzes, 'create').mockResolvedValue(newQuizId);
|
||||
// mock the findOne method
|
||||
jest.spyOn(collection, 'findOne')
|
||||
.mockResolvedValueOnce(sourceQuiz) // source quiz exists
|
||||
.mockResolvedValueOnce(null); // new name is not found
|
||||
|
||||
const result = await quizzes.duplicate(quizId, userId);
|
||||
|
||||
expect(result).toBe(newQuizId);
|
||||
|
||||
// Ensure mocks were called correctly
|
||||
expect(createMock).toHaveBeenCalledWith(
|
||||
sourceQuiz.title + ' (1)',
|
||||
sourceQuiz.content,
|
||||
undefined,
|
||||
userId
|
||||
);
|
||||
});
|
||||
|
||||
// Add test case for quizExists (name with number in parentheses)
|
||||
it('should create a new title if the quiz title already exists and ends with " (1)"', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
const newQuizId = ObjectId.createFromTime(Math.floor(Date.now() / 1000));
|
||||
const sourceQuiz = {
|
||||
title: 'Test Quiz (1)',
|
||||
content: 'This is a test quiz.',
|
||||
};
|
||||
|
||||
const createMock = jest.spyOn(quizzes, 'create').mockResolvedValue(newQuizId);
|
||||
// mock the findOne method
|
||||
jest.spyOn(collection, 'findOne')
|
||||
.mockResolvedValueOnce(sourceQuiz) // source quiz exists
|
||||
.mockResolvedValueOnce(null); // new name is not found
|
||||
|
||||
const result = await quizzes.duplicate(quizId, userId);
|
||||
|
||||
expect(result).toBe(newQuizId);
|
||||
|
||||
// Ensure mocks were called correctly
|
||||
expect(createMock).toHaveBeenCalledWith(
|
||||
'Test Quiz (2)',
|
||||
sourceQuiz.content,
|
||||
undefined,
|
||||
userId
|
||||
);
|
||||
});
|
||||
|
||||
// test case for duplication of "C (1)" but "C (2)" already exists, so it should create "C (3)"
|
||||
it('should create a new title if the quiz title already exists and ends with " (n)" but the incremented n also exists', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
const newQuizId = ObjectId.createFromTime(Math.floor(Date.now() / 1000));
|
||||
const sourceQuiz = {
|
||||
title: 'Test Quiz (1)',
|
||||
content: 'This is a test quiz.',
|
||||
};
|
||||
|
||||
const createMock = jest.spyOn(quizzes, 'create').mockResolvedValue(newQuizId);
|
||||
|
||||
// mock the findOne method
|
||||
jest.spyOn(collection, 'findOne')
|
||||
.mockResolvedValueOnce(sourceQuiz) // source quiz exists
|
||||
.mockResolvedValueOnce({ title: 'Test Quiz (2)' }) // new name collision
|
||||
.mockResolvedValueOnce(null); // final new name is not found
|
||||
|
||||
const result = await quizzes.duplicate(quizId, userId);
|
||||
|
||||
expect(result).toBe(newQuizId);
|
||||
|
||||
// Ensure mocks were called correctly
|
||||
expect(createMock).toHaveBeenCalledWith(
|
||||
'Test Quiz (3)',
|
||||
sourceQuiz.content,
|
||||
undefined,
|
||||
userId
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error if the quiz does not exist', async () => {
|
||||
const quizId = '60c72b2f9b1d8b3a4c8e4d3b';
|
||||
const userId = '12345';
|
||||
|
||||
// Mock the response from getContent
|
||||
jest.spyOn(quizzes, 'getContent').mockResolvedValue(null);
|
||||
|
||||
await expect(quizzes.duplicate(quizId, userId)).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
});
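// The three duplicate tests above pin down the renaming scheme: "Test Quiz"
// becomes "Test Quiz (1)", "Test Quiz (1)" becomes "Test Quiz (2)", and the
// counter keeps incrementing past collisions. A minimal sketch of a helper
// with that behaviour, assuming an async `exists(title)` predicate backed by
// collection.findOne (illustrative only; the project's generateUniqueTitle in
// models/utils.js may be implemented differently):
async function nextAvailableTitle(title, exists) {
    const match = title.match(/^(.*) \((\d+)\)$/);
    const base = match ? match[1] : title;
    let counter = match ? parseInt(match[2], 10) + 1 : 1;
    let candidate = `${base} (${counter})`;
    while (await exists(candidate)) {
        counter += 1;
        candidate = `${base} (${counter})`;
    }
    return candidate;
}
// e.g. with 'Test Quiz (2)' already taken, nextAvailableTitle('Test Quiz (1)', exists)
// resolves to 'Test Quiz (3)', matching the third test case.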
@ -5,7 +5,8 @@ const { setupWebsocket } = require("../socket/socket");
|
||||
process.env.NODE_ENV = "test";
|
||||
|
||||
const BACKEND_PORT = 4400;
|
||||
// pick a random port number for testing
|
||||
const BACKEND_PORT = Math.ceil(Math.random() * 1000 + 3000);
|
||||
const BACKEND_URL = "http://localhost";
|
||||
|
||||
const BACKEND_API = `${BACKEND_URL}:${BACKEND_PORT}`;
|
||||
86
server/__tests__/users.test.js
Normal file
@ -0,0 +1,86 @@
const Users = require('../models/users');
|
||||
const bcrypt = require('bcrypt');
|
||||
const Quizzes = require('../models/quiz');
|
||||
const Folders = require('../models/folders');
|
||||
const AppError = require('../middleware/AppError');
|
||||
const { ObjectId } = require('mongodb');
|
||||
|
||||
jest.mock('bcrypt');
|
||||
jest.mock('../middleware/AppError');
|
||||
jest.mock('../models/folders');
|
||||
|
||||
describe('Users', () => {
|
||||
let users;
|
||||
let db;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks(); // Clear any previous mock calls
|
||||
|
||||
// Mock the database connection
|
||||
db = {
|
||||
connect: jest.fn(),
|
||||
getConnection: jest.fn().mockReturnThis(), // Add getConnection method
|
||||
collection: jest.fn().mockReturnThis(),
|
||||
findOne: jest.fn(),
|
||||
insertOne: jest.fn().mockResolvedValue({ insertedId: new ObjectId() }), // Mock insertOne to return an ObjectId
|
||||
updateOne: jest.fn(),
|
||||
deleteOne: jest.fn(),
|
||||
};
|
||||
|
||||
const quizModel = new Quizzes(db);
|
||||
const foldersModel = new Folders(db, quizModel);
|
||||
|
||||
users = new Users(db, foldersModel);
|
||||
});
|
||||
|
||||
it('should register a new user', async () => {
|
||||
db.collection().findOne.mockResolvedValue(null); // No user found
|
||||
db.collection().insertOne.mockResolvedValue({ insertedId: new ObjectId() });
|
||||
bcrypt.hash.mockResolvedValue('hashedPassword');
|
||||
users.folders.create.mockResolvedValue(true);
|
||||
|
||||
const email = 'test@example.com';
|
||||
const password = 'password123';
|
||||
const result = await users.register(email, password);
|
||||
|
||||
expect(db.connect).toHaveBeenCalled();
|
||||
expect(db.collection().findOne).toHaveBeenCalledWith({ email });
|
||||
expect(bcrypt.hash).toHaveBeenCalledWith(password, 10);
|
||||
expect(db.collection().insertOne).toHaveBeenCalledWith({
|
||||
email,
|
||||
password: 'hashedPassword',
|
||||
created_at: expect.any(Date),
|
||||
});
|
||||
expect(users.folders.create).toHaveBeenCalledWith('Dossier par Défaut', expect.any(String));
|
||||
expect(result.insertedId).toBeDefined(); // Ensure result has insertedId
|
||||
});
|
||||
|
||||
// it('should update the user password', async () => {
|
||||
// db.collection().updateOne.mockResolvedValue({ modifiedCount: 1 });
|
||||
// bcrypt.hash.mockResolvedValue('hashedPassword');
|
||||
|
||||
// const email = 'test@example.com';
|
||||
// const newPassword = 'newPassword123';
|
||||
// const result = await users.updatePassword(email, newPassword);
|
||||
|
||||
// expect(db.connect).toHaveBeenCalled();
|
||||
// expect(db.collection().updateOne).toHaveBeenCalledWith(
|
||||
// { email },
|
||||
// { $set: { password: 'hashedPassword' } }
|
||||
// );
|
||||
// expect(result).toEqual(newPassword);
|
||||
// });
|
||||
|
||||
// it('should delete a user', async () => {
|
||||
// db.collection().deleteOne.mockResolvedValue({ deletedCount: 1 });
|
||||
|
||||
// const email = 'test@example.com';
|
||||
// const result = await users.delete(email);
|
||||
|
||||
// expect(db.connect).toHaveBeenCalled();
|
||||
// expect(db.collection().deleteOne).toHaveBeenCalledWith({ email });
|
||||
// expect(result).toBe(true);
|
||||
// });
|
||||
|
||||
// Add more tests as needed
|
||||
});
@ -1,21 +1,57 @@
// Import API
|
||||
const express = require("express");
|
||||
const http = require("http");
|
||||
const dotenv = require('dotenv')
|
||||
const dotenv = require('dotenv');
|
||||
|
||||
// Import Sockets
|
||||
const { setupWebsocket } = require("./socket/socket");
|
||||
const { Server } = require("socket.io");
|
||||
// instantiate the db
|
||||
const db = require('./config/db.js');
|
||||
// instantiate the models
|
||||
const quiz = require('./models/quiz.js');
|
||||
const quizModel = new quiz(db);
|
||||
const folders = require('./models/folders.js');
|
||||
const foldersModel = new folders(db, quizModel);
|
||||
const users = require('./models/users.js');
|
||||
const userModel = new users(db, foldersModel);
|
||||
const images = require('./models/images.js');
|
||||
const imageModel = new images(db);
|
||||
const {RoomRepository} = require('./models/room.js');
|
||||
const roomRepModel = new RoomRepository(db);
|
||||
|
||||
//import routers
|
||||
// Instantiate the controllers
|
||||
const QuizProviderOptions = {
|
||||
provider: 'docker'
|
||||
};
|
||||
|
||||
// instantiate the controllers
|
||||
const usersController = require('./controllers/users.js');
|
||||
const usersControllerInstance = new usersController(userModel);
|
||||
const foldersController = require('./controllers/folders.js');
|
||||
const foldersControllerInstance = new foldersController(foldersModel);
|
||||
const quizController = require('./controllers/quiz.js');
|
||||
const quizControllerInstance = new quizController(quizModel, foldersModel);
|
||||
const imagesController = require('./controllers/images.js');
|
||||
const imagesControllerInstance = new imagesController(imageModel);
|
||||
const roomsController = require('./controllers/rooms.js');
|
||||
const roomsControllerInstance = new roomsController(QuizProviderOptions,roomRepModel);
|
||||
|
||||
// export the controllers
|
||||
module.exports.users = usersControllerInstance;
|
||||
module.exports.folders = foldersControllerInstance;
|
||||
module.exports.quizzes = quizControllerInstance;
|
||||
module.exports.images = imagesControllerInstance;
|
||||
module.exports.rooms = roomsControllerInstance;
|
||||
|
||||
//import routers (instantiate controllers as side effect)
|
||||
const userRouter = require('./routers/users.js');
|
||||
const folderRouter = require('./routers/folders.js');
|
||||
const quizRouter = require('./routers/quiz.js');
|
||||
const imagesRouter = require('./routers/images.js')
|
||||
const AuthManager = require('./auth/auth-manager.js')
|
||||
const authRouter = require('./routers/auth.js')
|
||||
const imagesRouter = require('./routers/images.js');
|
||||
const AuthManager = require('./auth/auth-manager.js');
|
||||
const authRouter = require('./routers/auth.js');
|
||||
const roomRouter = require('./routers/rooms.js');
|
||||
const healthRouter = require('./routers/health.js');
|
||||
|
||||
// Setup environement
|
||||
// Setup environment
|
||||
dotenv.config();
|
||||
|
||||
// Setup urls from configs
|
||||
|
|
@ -23,7 +59,6 @@ const use_ports = (process.env['USE_PORTS'] || 'false').toLowerCase() == "true"
|
|||
process.env['FRONTEND_URL'] = process.env['SITE_URL'] + (use_ports ? `:${process.env['FRONTEND_PORT']}`:"")
|
||||
process.env['BACKEND_URL'] = process.env['SITE_URL'] + (use_ports ? `:${process.env['PORT']}`:"")
|
||||
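// Worked example of the URL composition above (values are assumptions, not
// taken from the repo): with SITE_URL=http://localhost, USE_PORTS=true,
// FRONTEND_PORT=5173 and PORT=4400, this yields
//   FRONTEND_URL = http://localhost:5173
//   BACKEND_URL  = http://localhost:4400
// With USE_PORTS=false (e.g. behind a reverse proxy), both collapse to SITE_URL.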
|
||||
const db = require('./config/db.js');
|
||||
const errorHandler = require("./middleware/errorHandler.js");
|
||||
|
||||
// Start app
|
||||
|
|
@ -31,22 +66,10 @@ const app = express();
|
|||
const cors = require("cors");
|
||||
const bodyParser = require('body-parser');
|
||||
|
||||
const configureServer = (httpServer) => {
|
||||
return new Server(httpServer, {
|
||||
path: "/socket.io",
|
||||
cors: {
|
||||
origin: "*",
|
||||
methods: ["GET", "POST"],
|
||||
credentials: true,
|
||||
},
|
||||
});
|
||||
};
|
||||
let server = http.createServer(app);
|
||||
let isDev = process.env.NODE_ENV === 'development';
|
||||
console.log(`Environnement: ${process.env.NODE_ENV} (${isDev ? 'dev' : 'prod'})`);
|
||||
|
||||
// Start sockets
|
||||
const server = http.createServer(app);
|
||||
const io = configureServer(server);
|
||||
|
||||
setupWebsocket(io);
|
||||
app.use(cors());
|
||||
app.use(bodyParser.urlencoded({ extended: true }));
|
||||
app.use(bodyParser.json());
|
||||
|
|
@ -69,21 +92,23 @@ app.use(session({
|
|||
|
||||
authManager = new AuthManager(app)
|
||||
app.use(errorHandler)
|
||||
app.use('/api/room', roomRouter);
|
||||
app.use('/health', healthRouter);
|
||||
|
||||
app.use(errorHandler);
|
||||
|
||||
// Start server
|
||||
async function start() {
|
||||
|
||||
const port = process.env.PORT || 4400;
|
||||
|
||||
// Check DB connection
|
||||
await db.connect()
|
||||
await db.connect();
|
||||
db.getConnection();
|
||||
console.log(`Connexion MongoDB établie`);
|
||||
|
||||
server.listen(port, () => {
|
||||
console.log(`Serveur écoutant sur le port ${port}`);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
start();
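// Client-side counterpart of the Socket.IO setup above (an illustrative
// sketch, not part of this commit; socket.io-client is assumed to be the
// frontend dependency): the server mounts Socket.IO at path "/socket.io" with
// origin "*" and listens on process.env.PORT (4400 by default).
const { io } = require("socket.io-client");

const socket = io("http://localhost:4400", { path: "/socket.io" });
socket.on("connect", () => {
    console.log("connected to quiz server as", socket.id);
});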
@ -1,174 +1,170 @@
//controller
|
||||
const model = require('../models/folders.js');
|
||||
|
||||
const AppError = require('../middleware/AppError.js');
|
||||
const { MISSING_REQUIRED_PARAMETER, NOT_IMPLEMENTED, FOLDER_NOT_FOUND, FOLDER_ALREADY_EXISTS, GETTING_FOLDER_ERROR, DELETE_FOLDER_ERROR, UPDATE_FOLDER_ERROR, MOVING_FOLDER_ERROR, DUPLICATE_FOLDER_ERROR, COPY_FOLDER_ERROR } = require('../constants/errorCodes');
|
||||
|
||||
// controllers must use arrow functions to bind 'this' to the class instance in order to access class properties as callbacks in Express
|
||||
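// Illustrative sketch (not part of this diff) of why the class properties
// below are arrow functions: a plain prototype method loses `this` when the
// bare function reference is handed to Express as a route callback, while a
// class-field arrow function stays bound to the instance.
class BindingDemo {
    constructor(model) { this.model = model; }
    plainMethod() { return this.model; }   // `this` is undefined when detached
    arrowMethod = () => this.model;        // `this` stays the instance
}
const demo = new BindingDemo({ ok: true });
const detachedPlain = demo.plainMethod;
const detachedArrow = demo.arrowMethod;
// detachedPlain()  -> TypeError: Cannot read properties of undefined (reading 'model')
// detachedArrow()  -> { ok: true }, so it is safe to pass as an Express callback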
class FoldersController {
|
||||
|
||||
constructor(foldersModel) {
|
||||
this.folders = foldersModel;
|
||||
}
|
||||
|
||||
/***
|
||||
* Basic queries
|
||||
*/
|
||||
async create(req, res, next) {
|
||||
create = async (req, res, next) => {
|
||||
try {
|
||||
const { title } = req.body;
|
||||
|
||||
|
||||
if (!title) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
const result = await model.create(title, req.user.userId);
|
||||
|
||||
|
||||
const result = await this.folders.create(title, req.user.userId);
|
||||
|
||||
if (!result) {
|
||||
throw new AppError(FOLDER_ALREADY_EXISTS);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Dossier créé avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async getUserFolders(req, res, next) {
|
||||
|
||||
|
||||
getUserFolders = async (req, res, next) => {
|
||||
try {
|
||||
const folders = await model.getUserFolders(req.user.userId);
|
||||
|
||||
const folders = await this.folders.getUserFolders(req.user.userId);
|
||||
|
||||
if (!folders) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
data: folders
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async getFolderContent(req, res, next) {
|
||||
|
||||
getFolderContent = async (req, res, next) => {
|
||||
try {
|
||||
const { folderId } = req.params;
|
||||
|
||||
|
||||
if (!folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const owner = await model.getOwner(folderId);
|
||||
|
||||
const owner = await this.folders.getOwner(folderId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const content = await model.getContent(folderId);
|
||||
|
||||
|
||||
const content = await this.folders.getContent(folderId);
|
||||
|
||||
if (!content) {
|
||||
throw new AppError(GETTING_FOLDER_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
data: content
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async delete(req, res, next) {
|
||||
|
||||
delete = async (req, res, next) => {
|
||||
try {
|
||||
const { folderId } = req.params;
|
||||
|
||||
|
||||
if (!folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const owner = await model.getOwner(folderId);
|
||||
|
||||
const owner = await this.folders.getOwner(folderId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const result = await model.delete(folderId);
|
||||
|
||||
|
||||
const result = await this.folders.delete(folderId);
|
||||
|
||||
if (!result) {
|
||||
throw new AppError(DELETE_FOLDER_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Dossier supprimé avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async rename(req, res, next) {
|
||||
|
||||
rename = async (req, res, next) => {
|
||||
try {
|
||||
const { folderId, newTitle } = req.body;
|
||||
|
||||
|
||||
if (!folderId || !newTitle) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const owner = await model.getOwner(folderId);
|
||||
|
||||
const owner = await this.folders.getOwner(folderId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const result = await model.rename(folderId, newTitle);
|
||||
|
||||
|
||||
const result = await this.folders.rename(folderId, newTitle);
|
||||
|
||||
if (!result) {
|
||||
throw new AppError(UPDATE_FOLDER_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Dossier mis à jour avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async duplicate(req, res, next) {
|
||||
|
||||
duplicate = async (req, res, next) => {
|
||||
try {
|
||||
const { folderId, } = req.body;
|
||||
|
||||
if (!folderId ) {
|
||||
const { folderId } = req.body;
|
||||
|
||||
if (!folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const owner = await model.getOwner(folderId);
|
||||
|
||||
const owner = await this.folders.getOwner(folderId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const userId = req.user.userId;
|
||||
|
||||
const newFolderId = await model.duplicate(folderId, userId);
|
||||
|
||||
|
||||
const userId = req.user.userId;
|
||||
|
||||
const newFolderId = await this.folders.duplicate(folderId, userId);
|
||||
|
||||
if (!newFolderId) {
|
||||
throw new AppError(DUPLICATE_FOLDER_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Dossier dupliqué avec succès.',
|
||||
newFolderId: newFolderId
|
||||
|
|
@ -177,30 +173,30 @@ class FoldersController {
|
|||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async copy(req, res, next) {
|
||||
|
||||
copy = async (req, res, next) => {
|
||||
try {
|
||||
const { folderId, newTitle } = req.body;
|
||||
|
||||
|
||||
if (!folderId || !newTitle) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const owner = await model.getOwner(folderId);
|
||||
|
||||
const owner = await this.folders.getOwner(folderId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
|
||||
const userId = req.user.userId; // Assuming userId is obtained from authentication
|
||||
|
||||
const newFolderId = await model.copy(folderId, userId);
|
||||
|
||||
|
||||
const newFolderId = await this.folders.copy(folderId, userId);
|
||||
|
||||
if (!newFolderId) {
|
||||
throw new AppError(COPY_FOLDER_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Dossier copié avec succès.',
|
||||
newFolderId: newFolderId
|
||||
|
|
@ -210,27 +206,27 @@ class FoldersController {
|
|||
}
|
||||
}
|
||||
|
||||
async getFolderById(req, res, next) {
|
||||
getFolderById = async (req, res, next) => {
|
||||
try {
|
||||
const { folderId } = req.params;
|
||||
|
||||
|
||||
if (!folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const owner = await model.getOwner(folderId);
|
||||
|
||||
const owner = await this.folders.getOwner(folderId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const folder = await model.getFolderById(folderId);
|
||||
|
||||
|
||||
const folder = await this.folders.getFolderById(folderId);
|
||||
|
||||
if (!folder) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
data: folder
|
||||
});
|
||||
|
|
@ -238,8 +234,8 @@ class FoldersController {
|
|||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async folderExists(req, res, next) {
|
||||
|
||||
folderExists = async (req, res, next) => {
|
||||
try {
|
||||
const { title } = req.body;
|
||||
|
||||
|
|
@ -247,10 +243,10 @@ class FoldersController {
|
|||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
const userId = req.user.userId;
|
||||
const userId = req.user.userId;
|
||||
|
||||
// Vérifie si le dossier existe pour l'utilisateur donné
|
||||
const exists = await model.folderExists(title, userId);
|
||||
const exists = await this.folders.folderExists(title, userId);
|
||||
|
||||
return res.status(200).json({
|
||||
exists: exists
|
||||
|
|
@ -260,9 +256,8 @@ class FoldersController {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
module.exports = new FoldersController;
|
||||
module.exports = FoldersController;
|
||||
@ -1,56 +1,55 @@
const model = require('../models/images.js');
|
||||
|
||||
const AppError = require('../middleware/AppError.js');
|
||||
const { MISSING_REQUIRED_PARAMETER, IMAGE_NOT_FOUND } = require('../constants/errorCodes');
|
||||
|
||||
class ImagesController {
|
||||
|
||||
async upload(req, res, next) {
|
||||
constructor(imagesModel) {
|
||||
this.images = imagesModel;
|
||||
}
|
||||
|
||||
upload = async (req, res, next) => {
|
||||
try {
|
||||
const file = req.file;
|
||||
|
||||
|
||||
if (!file) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
const id = await model.upload(file, req.user.userId);
|
||||
|
||||
|
||||
const id = await this.images.upload(file, req.user.userId);
|
||||
|
||||
return res.status(200).json({
|
||||
id: id
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
async get(req, res, next) {
|
||||
};
|
||||
|
||||
get = async (req, res, next) => {
|
||||
try {
|
||||
const { id } = req.params;
|
||||
|
||||
|
||||
if (!id) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
const image = await model.get(id);
|
||||
|
||||
|
||||
const image = await this.images.get(id);
|
||||
|
||||
if (!image) {
|
||||
throw new AppError(IMAGE_NOT_FOUND)
|
||||
throw new AppError(IMAGE_NOT_FOUND);
|
||||
}
|
||||
|
||||
|
||||
// Set Headers for display in browser
|
||||
res.setHeader('Content-Type', image.mime_type);
|
||||
res.setHeader('Content-Disposition', 'inline; filename=' + image.file_name);
|
||||
res.setHeader('Accept-Ranges', 'bytes');
|
||||
res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate');
|
||||
return res.send(image.file_content);
|
||||
}
|
||||
catch (error) {
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
module.exports = new ImagesController;
|
||||
module.exports = ImagesController;
|
||||
|
@ -1,5 +1,3 @@
const model = require('../models/quiz.js');
|
||||
const folderModel = require('../models/folders.js');
|
||||
const emailer = require('../config/email.js');
|
||||
|
||||
const AppError = require('../middleware/AppError.js');
|
||||
|
|
@ -7,184 +5,181 @@ const { MISSING_REQUIRED_PARAMETER, NOT_IMPLEMENTED, QUIZ_NOT_FOUND, FOLDER_NOT_
|
|||
|
||||
class QuizController {
|
||||
|
||||
async create(req, res, next) {
|
||||
constructor(quizModel, foldersModel) {
|
||||
this.folders = foldersModel;
|
||||
this.quizzes = quizModel;
|
||||
}
|
||||
|
||||
create = async (req, res, next) => {
|
||||
try {
|
||||
const { title, content, folderId } = req.body;
|
||||
|
||||
|
||||
if (!title || !content || !folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const owner = await folderModel.getOwner(folderId);
|
||||
|
||||
const owner = await this.folders.getOwner(folderId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const result = await model.create(title, content, folderId, req.user.userId);
|
||||
|
||||
|
||||
const result = await this.quizzes.create(title, content, folderId, req.user.userId);
|
||||
|
||||
if (!result) {
|
||||
throw new AppError(QUIZ_ALREADY_EXISTS);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Quiz créé avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async get(req, res, next) {
|
||||
};
|
||||
|
||||
get = async (req, res, next) => {
|
||||
try {
|
||||
const { quizId } = req.params;
|
||||
|
||||
|
||||
if (!quizId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
const content = await model.getContent(quizId);
|
||||
|
||||
|
||||
const content = await this.quizzes.getContent(quizId);
|
||||
|
||||
if (!content) {
|
||||
throw new AppError(GETTING_QUIZ_ERROR);
|
||||
}
|
||||
|
||||
|
||||
// Is this quiz mine
|
||||
if (content.userId != req.user.userId) {
|
||||
throw new AppError(QUIZ_NOT_FOUND);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
data: content
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async delete(req, res, next) {
|
||||
};
|
||||
|
||||
delete = async (req, res, next) => {
|
||||
try {
|
||||
const { quizId } = req.params;
|
||||
|
||||
|
||||
if (!quizId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this quiz mine
|
||||
const owner = await model.getOwner(quizId);
|
||||
|
||||
const owner = await this.quizzes.getOwner(quizId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(QUIZ_NOT_FOUND);
|
||||
}
|
||||
|
||||
const result = await model.delete(quizId);
|
||||
|
||||
|
||||
const result = await this.quizzes.delete(quizId);
|
||||
|
||||
if (!result) {
|
||||
throw new AppError(DELETE_QUIZ_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Quiz supprimé avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async update(req, res, next) {
|
||||
};
|
||||
|
||||
update = async (req, res, next) => {
|
||||
try {
|
||||
const { quizId, newTitle, newContent } = req.body;
|
||||
|
||||
|
||||
if (!newTitle || !newContent || !quizId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this quiz mine
|
||||
const owner = await model.getOwner(quizId);
|
||||
|
||||
const owner = await this.quizzes.getOwner(quizId);
|
||||
|
||||
if (owner != req.user.userId) {
|
||||
throw new AppError(QUIZ_NOT_FOUND);
|
||||
}
|
||||
|
||||
const result = await model.update(quizId, newTitle, newContent);
|
||||
|
||||
|
||||
const result = await this.quizzes.update(quizId, newTitle, newContent);
|
||||
|
||||
if (!result) {
|
||||
throw new AppError(UPDATE_QUIZ_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Quiz mis à jour avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async move(req, res, next) {
|
||||
};
|
||||
|
||||
move = async (req, res, next) => {
|
||||
try {
|
||||
const { quizId, newFolderId } = req.body;
|
||||
|
||||
|
||||
if (!quizId || !newFolderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Is this quiz mine
|
||||
const quizOwner = await model.getOwner(quizId);
|
||||
|
||||
const quizOwner = await this.quizzes.getOwner(quizId);
|
||||
|
||||
if (quizOwner != req.user.userId) {
|
||||
throw new AppError(QUIZ_NOT_FOUND);
|
||||
}
|
||||
|
||||
|
||||
// Is this folder mine
|
||||
const folderOwner = await folderModel.getOwner(newFolderId);
|
||||
|
||||
const folderOwner = await this.folders.getOwner(newFolderId);
|
||||
|
||||
if (folderOwner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const result = await model.move(quizId, newFolderId);
|
||||
|
||||
|
||||
const result = await this.quizzes.move(quizId, newFolderId);
|
||||
|
||||
if (!result) {
|
||||
throw new AppError(MOVING_QUIZ_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Quiz déplacé avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
async copy(req, res, next) {
|
||||
copy = async (req, res, next) => {
|
||||
const { quizId, newTitle, folderId } = req.body;
|
||||
|
||||
|
||||
if (!quizId || !newTitle || !folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
throw new AppError(NOT_IMPLEMENTED);
|
||||
// const { quizId } = req.params;
|
||||
// const { newUserId } = req.body;
|
||||
|
||||
|
||||
// try {
|
||||
// //Trouver le quiz a dupliquer
|
||||
// const conn = db.getConnection();
|
||||
// const quiztoduplicate = await conn.collection('quiz').findOne({ _id: new ObjectId(quizId) });
|
||||
// const quiztoduplicate = await conn.collection('quiz').findOne({ _id: ObjectId.createFromHexString(quizId) });
|
||||
// if (!quiztoduplicate) {
|
||||
// throw new Error("Quiz non trouvé");
|
||||
// }
|
||||
|
|
@ -192,121 +187,119 @@ class QuizController {
|
|||
// //Suppression du id du quiz pour ne pas le répliquer
|
||||
// delete quiztoduplicate._id;
|
||||
// //Ajout du duplicata
|
||||
// await conn.collection('quiz').insertOne({ ...quiztoduplicate, userId: new ObjectId(newUserId) });
|
||||
// await conn.collection('quiz').insertOne({ ...quiztoduplicate, userId: ObjectId.createFromHexString(newUserId) });
|
||||
// res.json(Response.ok("Dossier dupliqué avec succès pour un autre utilisateur"));
|
||||
|
||||
|
||||
// } catch (error) {
|
||||
// if (error.message.startsWith("Quiz non trouvé")) {
|
||||
// return res.status(404).json(Response.badRequest(error.message));
|
||||
// }
|
||||
// res.status(500).json(Response.serverError(error.message));
|
||||
// }
|
||||
}
|
||||
|
||||
async deleteQuizzesByFolderId(req, res, next) {
|
||||
};
|
||||
|
||||
deleteQuizzesByFolderId = async (req, res, next) => {
|
||||
try {
|
||||
const { folderId } = req.body;
|
||||
|
||||
|
||||
if (!folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
|
||||
// Call the method from the Quiz model to delete quizzes by folder ID
|
||||
await this.quizzes.deleteQuizzesByFolderId(folderId);
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Quizzes deleted successfully.'
|
||||
});
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async duplicate(req, res, next) {
|
||||
const { quizId } = req.body;
|
||||
|
||||
};
|
||||
|
||||
duplicate = async (req, res, next) => {
|
||||
const { quizId } = req.body;
|
||||
|
||||
try {
|
||||
const newQuizId = await model.duplicate(quizId,req.user.userId);
|
||||
const newQuizId = await this.quizzes.duplicate(quizId, req.user.userId);
|
||||
res.status(200).json({ success: true, newQuizId });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async quizExists(title, userId) {
|
||||
};
|
||||
|
||||
quizExists = async (title, userId) => {
|
||||
try {
|
||||
const existingFile = await model.quizExists(title, userId);
|
||||
const existingFile = await this.quizzes.quizExists(title, userId);
|
||||
return existingFile !== null;
|
||||
} catch (error) {
|
||||
throw new AppError(GETTING_QUIZ_ERROR);
|
||||
}
|
||||
}
|
||||
|
||||
async Share(req, res, next) {
|
||||
};
|
||||
|
||||
share = async (req, res, next) => {
|
||||
try {
|
||||
const { quizId, email } = req.body;
|
||||
|
||||
if ( !quizId || !email) {
|
||||
if (!quizId || !email) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const link = `${process.env.FRONTEND_URL}/teacher/Share/${quizId}`;
|
||||
|
||||
|
||||
emailer.quizShare(email, link);
|
||||
|
||||
return res.status(200).json({
|
||||
message: 'Quiz partagé avec succès.'
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
async getShare(req, res, next) {
|
||||
getShare = async (req, res, next) => {
|
||||
try {
|
||||
const { quizId } = req.params;
|
||||
|
||||
if ( !quizId ) {
|
||||
if (!quizId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
const content = await model.getContent(quizId);
|
||||
|
||||
}
|
||||
|
||||
const content = await this.quizzes.getContent(quizId);
|
||||
|
||||
if (!content) {
|
||||
throw new AppError(GETTING_QUIZ_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
data: content.title
|
||||
});
|
||||
|
||||
}
|
||||
catch (error) {
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
async receiveShare(req, res, next) {
|
||||
};
|
||||
|
||||
receiveShare = async (req, res, next) => {
|
||||
try {
|
||||
const { quizId, folderId } = req.body;
|
||||
|
||||
if (!quizId || !folderId) {
|
||||
throw new AppError(MISSING_REQUIRED_PARAMETER);
|
||||
}
|
||||
|
||||
const folderOwner = await folderModel.getOwner(folderId);
|
||||
|
||||
const folderOwner = await this.folders.getOwner(folderId);
|
||||
if (folderOwner != req.user.userId) {
|
||||
throw new AppError(FOLDER_NOT_FOUND);
|
||||
}
|
||||
|
||||
const content = await model.getContent(quizId);
|
||||
const content = await this.quizzes.getContent(quizId);
|
||||
if (!content) {
|
||||
throw new AppError(GETTING_QUIZ_ERROR);
|
||||
}
|
||||
|
||||
const result = await model.create(content.title, content.content, folderId, req.user.userId);
|
||||
const result = await this.quizzes.create(content.title, content.content, folderId, req.user.userId);
|
||||
if (!result) {
|
||||
throw new AppError(QUIZ_ALREADY_EXISTS);
|
||||
}
|
||||
|
|
@ -314,13 +307,11 @@ class QuizController {
|
|||
return res.status(200).json({
|
||||
message: 'Quiz partagé reçu.'
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
module.exports = new QuizController;
|
||||
module.exports = QuizController;
|
||||
94
server/controllers/rooms.js
Normal file
@ -0,0 +1,94 @@
const {Room} = require('../models/room.js');
|
||||
|
||||
const BaseRoomProvider = require('../roomsProviders/base-provider.js');
|
||||
//const ClusterRoomProvider = require('../roomsProviders/cluster-provider.js');
|
||||
const DockerRoomProvider = require('../roomsProviders/docker-provider.js');
|
||||
//const KubernetesRoomProvider = require('../roomsProviders/kubernetes-provider');
|
||||
|
||||
const NB_CODE_CHARS = 6;
|
||||
const NB_MS_UPDATE_ROOM = 1000;
|
||||
const NB_MS_CLEANUP = 30000;
|
||||
|
||||
class RoomsController {
|
||||
constructor(options = {}, roomRepository) {
|
||||
this.provider = this.createProvider(
|
||||
options.provider || process.env.ROOM_PROVIDER || 'cluster',
|
||||
options.providerOptions,
|
||||
roomRepository
|
||||
);
|
||||
this.roomRepository = roomRepository;
|
||||
this.setupTasks();
|
||||
}
|
||||
|
||||
createProvider(type, options, repository) {
|
||||
switch (type) {
|
||||
/*
|
||||
case 'cluster':
|
||||
return new ClusterRoomProvider(options, this.roomRepository);
|
||||
*/
|
||||
// Uncomment these as needed
|
||||
case 'docker':
|
||||
return new DockerRoomProvider(options, repository);
|
||||
/*
|
||||
case 'kubernetes':
|
||||
return new KubernetesRoomProvider(options);
|
||||
*/
|
||||
default:
|
||||
throw new Error(`Type d'approvisionnement inconnu: ${type}`);
|
||||
}
|
||||
}
|
||||
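// Worked example of the provider fallback above (an observation on the code as
// written, values assumed): with neither options.provider nor ROOM_PROVIDER
// set, the constructor resolves the type to 'cluster'; since the cluster case
// is commented out, the switch falls through to default and throws
// "Type d'approvisionnement inconnu: cluster". server/app.js avoids this by
// passing { provider: 'docker' } explicitly.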
|
||||
async setupTasks(){
|
||||
await this.provider.syncInstantiatedRooms();
|
||||
// Update rooms
|
||||
setInterval(() => {
|
||||
this.provider.updateRoomsInfo().catch(console.error);
|
||||
}, NB_MS_UPDATE_ROOM);
|
||||
|
||||
// Cleanup rooms
|
||||
setInterval(() => {
|
||||
this.provider.cleanup().catch(console.error);
|
||||
}, NB_MS_CLEANUP);
|
||||
}
|
||||
|
||||
async createRoom(options = {}) {
|
||||
let roomIdValid = false
|
||||
let roomId;
|
||||
|
||||
while(!roomIdValid){
|
||||
roomId = options.roomId || this.generateRoomId();
|
||||
roomIdValid = !(await this.provider.getRoomInfo(roomId));
|
||||
}
|
||||
|
||||
return await this.provider.createRoom(roomId,options);
|
||||
}
|
||||
|
||||
async updateRoom(roomId, info) {
|
||||
return await this.provider.updateRoomInfo(roomId, {});
|
||||
}
|
||||
|
||||
async deleteRoom(roomId) {
|
||||
return await this.provider.deleteRoom(roomId);
|
||||
}
|
||||
|
||||
async getRoomStatus(roomId) {
|
||||
return await this.provider.getRoomStatus(roomId);
|
||||
}
|
||||
|
||||
async listRooms() {
|
||||
return await this.provider.listRooms();
|
||||
}
|
||||
|
||||
generateRoomId() {
|
||||
const characters = "0123456789";
|
||||
let result = "";
|
||||
for (let i = 0; i < NB_CODE_CHARS; i++) {
|
||||
result += characters.charAt(
|
||||
Math.floor(Math.random() * characters.length)
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = RoomsController;
|
||||
|
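For orientation, one plausible way to wire this controller into the rest of the server (the route path, the `config/db.js` helper and the choice of the docker provider are assumptions, not shown in this commit):

// Hypothetical wiring sketch, not part of the commit.
const express = require('express');
const RoomsController = require('./controllers/rooms.js');
const { RoomRepository } = require('./models/room.js');
const db = require('./config/db.js');

// Falls back to process.env.ROOM_PROVIDER, then 'cluster', if no provider is given.
const roomsController = new RoomsController({ provider: 'docker' }, new RoomRepository(db));

const router = express.Router();
router.post('/rooms', async (req, res, next) => {
    try {
        const room = await roomsController.createRoom(req.body);
        res.status(201).json(room);
    } catch (error) {
        next(error);
    }
});

module.exports = router;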
server/controllers/users.js
@@ -1,42 +1,43 @@
const emailer = require('../config/email.js');
const model = require('../models/users.js');
const jwt = require('../middleware/jwtToken.js');

const AppError = require('../middleware/AppError.js');
const { MISSING_REQUIRED_PARAMETER, LOGIN_CREDENTIALS_ERROR, GENERATE_PASSWORD_ERROR, UPDATE_PASSWORD_ERROR, DELETE_USER_ERROR } = require('../constants/errorCodes');

// controllers must use arrow functions to bind 'this' to the class instance in order to access class properties as callbacks in Express
class UsersController {
    constructor(userModel) {
        this.users = userModel;
    }

    async delete(req, res, next) {
        try {
            const { email, password } = req.body;

            if (!email || !password) {
                throw new AppError(MISSING_REQUIRED_PARAMETER);
            }

            // verify creds first
            const user = await model.login(email, password);

            const user = await this.users.login(email, password);

            if (!user) {
                throw new AppError(LOGIN_CREDENTIALS_ERROR);
            }

            const result = await model.delete(email)

            const result = await this.users.delete(email);

            if (!result) {
                throw new AppError(DELETE_USER_ERROR)
                throw new AppError(DELETE_USER_ERROR);
            }

            return res.status(200).json({
                message: 'Utilisateur supprimé avec succès'
            });
        }
        catch (error) {
        } catch (error) {
            return next(error);
        }
    }

}

module.exports = new UsersController;
module.exports = UsersController;
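Together with `module.exports = UsersController`, the pattern across this commit is to export classes instead of ready-made singletons, so the object graph is built in one place and tests can inject fakes. A hypothetical composition root (paths and wiring assumed, not shown here):

// Hypothetical composition sketch, not part of the commit.
const db = require('./config/db.js');
const Quiz = require('./models/quiz.js');
const Folders = require('./models/folders.js');
const Users = require('./models/users.js');
const UsersController = require('./controllers/users.js');

const quizModel = new Quiz(db);                      // constructor(db)
const foldersModel = new Folders(db, quizModel);     // constructor(db, quizModel)
const usersModel = new Users(db, foldersModel);      // constructor(db, foldersModel)
const usersController = new UsersController(usersModel);

// Binding note: a plain method such as usersController.delete keeps `this`
// only if it is wrapped when registered as a route handler, e.g.
// router.delete('/user', (req, res, next) => usersController.delete(req, res, next));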
server/middleware/jwtToken.js
@@ -22,7 +22,7 @@ class Token {
            if (error) {
                throw new AppError(UNAUTHORIZED_INVALID_TOKEN)
            }

            req.user = payload;
        });
server/models/folders.js
@@ -1,19 +1,31 @@
//model
const db = require('../config/db.js')
const { ObjectId } = require('mongodb');
const Quiz = require('./quiz.js');
const ObjectId = require('mongodb').ObjectId;
const { generateUniqueTitle } = require('./utils');

class Folders {
    constructor(db, quizModel) {
        this.db = db;
        this.quizModel = quizModel;
    }

    async create(title, userId) {
        await db.connect()
        const conn = db.getConnection();

        console.log("LOG: create", title, userId);

        if (!title || !userId) {
            throw new Error('Missing required parameter(s)');
        }

        await this.db.connect()
        const conn = this.db.getConnection();

        const foldersCollection = conn.collection('folders');

        const existingFolder = await foldersCollection.findOne({ title: title, userId: userId });

        if (existingFolder) return new Error('Folder already exists');
        if (existingFolder) {
            throw new Error('Folder already exists');
        }

        const newFolder = {
            userId: userId,
@@ -27,8 +39,8 @@ class Folders {
    }

    async getUserFolders(userId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const foldersCollection = conn.collection('folders');

@@ -38,19 +50,20 @@ class Folders {
    }

    async getOwner(folderId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const foldersCollection = conn.collection('folders');

        const folder = await foldersCollection.findOne({ _id: new ObjectId(folderId) });
        const folder = await foldersCollection.findOne({ _id: ObjectId.createFromHexString(folderId) });

        return folder.userId;
    }

    // finds all quizzes in a folder
    async getContent(folderId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const filesCollection = conn.collection('files');

@@ -60,26 +73,26 @@ class Folders {
    }

    async delete(folderId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const foldersCollection = conn.collection('folders');

        const folderResult = await foldersCollection.deleteOne({ _id: new ObjectId(folderId) });
        const folderResult = await foldersCollection.deleteOne({ _id: ObjectId.createFromHexString(folderId) });

        if (folderResult.deletedCount != 1) return false;
        await Quiz.deleteQuizzesByFolderId(folderId);
        await this.quizModel.deleteQuizzesByFolderId(folderId);

        return true;
    }

    async rename(folderId, newTitle) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const foldersCollection = conn.collection('folders');

        const result = await foldersCollection.updateOne({ _id: new ObjectId(folderId) }, { $set: { title: newTitle } })
        const result = await foldersCollection.updateOne({ _id: ObjectId.createFromHexString(folderId) }, { $set: { title: newTitle } })

        if (result.modifiedCount != 1) return false;

@@ -87,69 +100,77 @@ class Folders {
    }

    async duplicate(folderId, userId) {
        console.log("LOG: duplicate", folderId, userId);
        const conn = this.db.getConnection();
        const foldersCollection = conn.collection('folders');

        const sourceFolder = await this.getFolderWithContent(folderId);

        // Check if the new title already exists
        let newFolderTitle = sourceFolder.title + "-copie";
        let counter = 1;

        while (await this.folderExists(newFolderTitle, userId)) {
            newFolderTitle = `${sourceFolder.title}-copie(${counter})`;
            counter++;
        const sourceFolder = await foldersCollection.findOne({ _id: ObjectId.createFromHexString(folderId), userId: userId });
        if (!sourceFolder) {
            throw new Error(`Folder ${folderId} not found`);
        }

        const theUserId = userId;
        // Use the utility function to generate a unique title
        const newFolderTitle = await generateUniqueTitle(sourceFolder.title, async (title) => {
            console.log(`generateUniqueTitle(${title}): userId`, theUserId);
            return await foldersCollection.findOne({ title: title, userId: theUserId });
        });

        const newFolderId = await this.create(newFolderTitle, userId);

        if (!newFolderId) {
            throw new Error('Failed to create a duplicate folder.');
            throw new Error('Failed to create duplicate folder');
        }

        for (const quiz of sourceFolder.content) {
            const { title, content } = quiz;
            //console.log(title);
            //console.log(content);
            await Quiz.create(title, content, newFolderId.toString(), userId);
        // copy the quizzes from source folder to destination folder
        const content = await this.getContent(folderId);
        console.log("folders.duplicate: found content", content);
        for (const quiz of content) {
            console.log("folders.duplicate: creating quiz (copy)", quiz);
            const result = await this.quizModel.create(quiz.title, quiz.content, newFolderId.toString(), userId);
            if (!result) {
                throw new Error('Failed to create duplicate quiz');
            }
        }

        return newFolderId;

    }

    async folderExists(title, userId) {
        await db.connect();
        const conn = db.getConnection();
        console.log("LOG: folderExists", title, userId);
        await this.db.connect();
        const conn = this.db.getConnection();

        const foldersCollection = conn.collection('folders');
        const existingFolder = await foldersCollection.findOne({ title: title, userId: userId });

        return existingFolder !== null;
        return !!existingFolder;
    }

    async copy(folderId, userId) {

        const sourceFolder = await this.getFolderWithContent(folderId);
        const newFolderId = await this.create(sourceFolder.title, userId);
        if (!newFolderId) {
            throw new Error('Failed to create a new folder.');
        }
        for (const quiz of sourceFolder.content) {
            await this.createQuiz(quiz.title, quiz.content, newFolderId, userId);
            await this.quizModel.create(quiz.title, quiz.content, newFolderId, userId);
        }

        return newFolderId;

    }

    async getFolderById(folderId) {
        await db.connect();
        const conn = db.getConnection();
        await this.db.connect();
        const conn = this.db.getConnection();

        const foldersCollection = conn.collection('folders');

        const folder = await foldersCollection.findOne({ _id: new ObjectId(folderId) });
        const folder = await foldersCollection.findOne({ _id: ObjectId.createFromHexString(folderId) });

        if (!folder) return new Error(`Folder ${folderId} not found`);

        return folder;
    }

@@ -171,4 +192,4 @@ class Folders {

}

module.exports = new Folders;
module.exports = Folders;
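A change repeated throughout the models is `new ObjectId(id)` → `ObjectId.createFromHexString(id)`. A brief illustration of the difference (not from the repository; based on the documented behaviour of the `mongodb`/`bson` driver):

// Illustration only: createFromHexString accepts exactly one input shape, a
// 24-character hex string (the form ids arrive in from route parameters),
// while new ObjectId() also accepts a Buffer, another ObjectId, or no argument.
const { ObjectId } = require('mongodb');

const good = ObjectId.createFromHexString('65f1c0ffee65f1c0ffee65f1'); // ok
try {
    ObjectId.createFromHexString('not-a-valid-id'); // wrong length / not hex
} catch (err) {
    console.error('rejected:', err.message);        // the driver throws instead of guessing
}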
server/models/images.js
@@ -1,8 +1,12 @@
const db = require('../config/db.js')
//const db = require('../config/db.js')
const { ObjectId } = require('mongodb');

class Images {

    constructor(db) {
        this.db = db;
    }

    async upload(file, userId) {
        await db.connect()
        const conn = db.getConnection();

@@ -28,7 +32,7 @@ class Images {

        const imagesCollection = conn.collection('images');

        const result = await imagesCollection.findOne({ _id: new ObjectId(id) });
        const result = await imagesCollection.findOne({ _id: ObjectId.createFromHexString(id) });

        if (!result) return null;

@@ -41,4 +45,4 @@ class Images {

}

module.exports = new Images;
module.exports = Images;
server/models/quiz.js
@@ -1,17 +1,25 @@
const db = require('../config/db.js')
const { ObjectId } = require('mongodb');
const { generateUniqueTitle } = require('./utils');

class Quiz {

    constructor(db) {
        // console.log("Quiz constructor: db", db)
        this.db = db;
    }

    async create(title, content, folderId, userId) {
        await db.connect()
        const conn = db.getConnection();
        console.log(`quizzes: create title: ${title}, folderId: ${folderId}, userId: ${userId}`);
        await this.db.connect()
        const conn = this.db.getConnection();

        const quizCollection = conn.collection('files');

        const existingQuiz = await quizCollection.findOne({ title: title, folderId: folderId, userId: userId })

        if (existingQuiz) return null;
        if (existingQuiz) {
            throw new Error(`Quiz already exists with title: ${title}, folderId: ${folderId}, userId: ${userId}`);
        }

        const newQuiz = {
            folderId: folderId,

@@ -23,74 +31,87 @@ class Quiz {
        }

        const result = await quizCollection.insertOne(newQuiz);
        console.log("quizzes: create insertOne result", result);

        return result.insertedId;
    }

    async getOwner(quizId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const quizCollection = conn.collection('files');

        const quiz = await quizCollection.findOne({ _id: new ObjectId(quizId) });
        const quiz = await quizCollection.findOne({ _id: ObjectId.createFromHexString(quizId) });

        return quiz.userId;
    }

    async getContent(quizId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const quizCollection = conn.collection('files');

        const quiz = await quizCollection.findOne({ _id: new ObjectId(quizId) });
        const quiz = await quizCollection.findOne({ _id: ObjectId.createFromHexString(quizId) });

        return quiz;
    }

    async delete(quizId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const quizCollection = conn.collection('files');

        const result = await quizCollection.deleteOne({ _id: new ObjectId(quizId) });
        const result = await quizCollection.deleteOne({ _id: ObjectId.createFromHexString(quizId) });

        if (result.deletedCount != 1) return false;

        return true;
    }
    async deleteQuizzesByFolderId(folderId) {
        await db.connect();
        const conn = db.getConnection();
        await this.db.connect();
        const conn = this.db.getConnection();

        const quizzesCollection = conn.collection('files');

        // Delete all quizzes with the specified folderId
        await quizzesCollection.deleteMany({ folderId: folderId });
        const result = await quizzesCollection.deleteMany({ folderId: folderId });
        return result.deletedCount > 0;
    }

    async update(quizId, newTitle, newContent) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const quizCollection = conn.collection('files');

        const result = await quizCollection.updateOne({ _id: new ObjectId(quizId) }, { $set: { title: newTitle, content: newContent } });
        //Ne fonctionne pas si rien n'est chngé dans le quiz
        //if (result.modifiedCount != 1) return false;
        const result = await quizCollection.updateOne(
            { _id: ObjectId.createFromHexString(quizId) },
            {
                $set: {
                    title: newTitle,
                    content: newContent,
                    updated_at: new Date()
                }
            }
        );

        return true
        return result.modifiedCount === 1;
    }

    async move(quizId, newFolderId) {
        await db.connect()
        const conn = db.getConnection();
        await this.db.connect()
        const conn = this.db.getConnection();

        const quizCollection = conn.collection('files');

        const result = await quizCollection.updateOne({ _id: new ObjectId(quizId) }, { $set: { folderId: newFolderId } });
        const result = await quizCollection.updateOne(
            { _id: ObjectId.createFromHexString(quizId) },
            { $set: { folderId: newFolderId } }
        );

        if (result.modifiedCount != 1) return false;

@@ -98,29 +119,31 @@ class Quiz {
    }

    async duplicate(quizId, userId) {

        const sourceQuiz = await this.getContent(quizId);

        let newQuizTitle = `${sourceQuiz.title}-copy`;
        let counter = 1;
        while (await this.quizExists(newQuizTitle, userId)) {
            newQuizTitle = `${sourceQuiz.title}-copy(${counter})`;
            counter++;
        const conn = this.db.getConnection();
        const quizCollection = conn.collection('files');

        const sourceQuiz = await quizCollection.findOne({ _id: ObjectId.createFromHexString(quizId), userId: userId });
        if (!sourceQuiz) {
            throw new Error('Quiz not found for quizId: ' + quizId);
        }
        //console.log(newQuizTitle);
        const newQuizId = await this.create(newQuizTitle, sourceQuiz.content,sourceQuiz.folderId, userId);

        // Use the utility function to generate a unique title
        const newQuizTitle = await generateUniqueTitle(sourceQuiz.title, async (title) => {
            return await quizCollection.findOne({ title: title, folderId: sourceQuiz.folderId, userId: userId });
        });

        const newQuizId = await this.create(newQuizTitle, sourceQuiz.content, sourceQuiz.folderId, userId);

        if (!newQuizId) {
            throw new Error('Failed to create a duplicate quiz.');
            throw new Error('Failed to create duplicate quiz');
        }

        return newQuizId;

    }

    async quizExists(title, userId) {
        await db.connect();
        const conn = db.getConnection();
        await this.db.connect();
        const conn = this.db.getConnection();

        const filesCollection = conn.collection('files');
        const existingFolder = await filesCollection.findOne({ title: title, userId: userId });

@@ -130,4 +153,4 @@ class Quiz {

}

module.exports = new Quiz;
module.exports = Quiz;
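The rewritten `update()` also addresses the problem flagged by the removed comment (the old `modifiedCount` check failed whenever nothing in the quiz had actually changed): since `$set` now always writes `updated_at: new Date()`, any matched document is modified, so `result.modifiedCount === 1` becomes a reliable success signal. A hypothetical caller (not part of the commit):

// Hypothetical caller, assuming `quizzes` is an instance of this Quiz model.
async function saveQuiz(quizzes, quizId, title, content) {
    const ok = await quizzes.update(quizId, title, content);
    if (!ok) {
        // With updated_at always written, a false return now means
        // "no document matched quizId", not "nothing changed".
        throw new Error(`Quiz ${quizId} not found`);
    }
    return ok;
}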
87
server/models/room.js
Normal file
@@ -0,0 +1,87 @@
class Room {
    constructor(id, name, host, nbStudents = 0) { // Default nbStudents to 0
        this.id = id;
        this.name = name;

        if (!host.startsWith('http://') && !host.startsWith('https://')) {
            host = 'http://' + host;
        }
        this.host = host;

        this.nbStudents = nbStudents;
        this.mustBeCleaned = false;
    }
}

class RoomRepository {
    constructor(db) {
        this.db = db;
        this.connection = null;
        this.collection = null;
    }

    async init() {
        if (!this.connection) {
            await this.db.connect();
            this.connection = this.db.getConnection();
        }
        if (!this.collection) this.collection = this.connection.collection('rooms');
    }

    async create(room) {
        await this.init();
        const existingRoom = await this.collection.findOne({ id: room.id });
        if (existingRoom) {
            throw new Error(`Erreur : la salle ${room.id} existe déjà`);
        }
        const returnedId = await this.collection.insertOne(room);
        return await this.collection.findOne({ _id: returnedId.insertedId });
    }

    async get(id) {
        await this.init();
        const existingRoom = await this.collection.findOne({ id: id });
        if (!existingRoom) {
            console.warn(`La salle avec l'identifiant ${id} n'a pas été trouvée.`);
            return null;
        }
        return existingRoom;
    }

    async getAll() {
        await this.init();
        return await this.collection.find().toArray();
    }

    async update(room, roomId = null) {
        await this.init();

        const searchId = roomId ?? room.id;

        const result = await this.collection.updateOne(
            { id: searchId },
            { $set: room },
            { upsert: false }
        );

        if (result.modifiedCount === 0) {
            if (result.matchedCount > 0) {
                return true; // Document exists but no changes needed
            }
            return false;
        }
        return true;
    }

    async delete(id) {
        await this.init();
        const result = await this.collection.deleteOne({ id: id });
        if (result.deletedCount === 0) {
            console.warn(`La salle ${id} n'a pas été trouvée pour effectuer sa suppression.`);
            return false;
        }
        return true;
    }
}

module.exports = { Room, RoomRepository };
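A short usage sketch for the repository (hypothetical, not in the commit; `db` is assumed to be the shared `config/db.js` connection helper used by the other models):

// Hypothetical usage sketch, not part of the commit.
const db = require('../config/db.js');
const { Room, RoomRepository } = require('./room.js');

async function demo() {
    const repository = new RoomRepository(db);

    // A host without a scheme gets 'http://' prefixed by the Room constructor.
    const created = await repository.create(new Room('123456', 'Salle de démo', 'quizroom-123456:4500'));
    console.log(created.host); // http://quizroom-123456:4500

    // update() matches on the business id, not Mongo's _id, and never upserts.
    await repository.update(new Room('123456', 'Salle de démo', 'quizroom-123456:4500', 12));

    console.log((await repository.get('123456')).nbStudents); // 12
    await repository.delete('123456');
}

demo().catch(console.error);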
server/models/users.js
@@ -1,11 +1,18 @@
//user
const db = require("../config/db.js");

const bcrypt = require("bcrypt");
const AppError = require("../middleware/AppError.js");
const { USER_ALREADY_EXISTS } = require("../constants/errorCodes");
const Folders = require("../models/folders.js");

class Users {

    constructor(db, foldersModel) {
        // console.log("Users constructor: db", db)
        this.db = db;
        this.folders = foldersModel;
    }

    async hashPassword(password) {
        return await bcrypt.hash(password, 10);
    }

@@ -187,4 +194,4 @@ class Users {
    }
}

module.exports = new Users();
module.exports = Users;
35
server/models/utils.js
Normal file
@@ -0,0 +1,35 @@
// utils.js
async function generateUniqueTitle(baseTitle, existsCallback) {
    console.log(`generateUniqueTitle(${baseTitle})`);
    let newTitle = baseTitle;
    let counter = 1;

    const titleRegex = /(.*?)(\((\d+)\))?$/;
    const match = baseTitle.match(titleRegex);
    if (match) {
        baseTitle = match[1].trim();
        counter = match[3] ? parseInt(match[3], 10) + 1 : 1;
    }

    // If the base title does not end with a parentheses expression, start with "(1)"
    if (!match[2]) {
        newTitle = `${baseTitle} (${counter})`;
    } else {
        // else increment the counter in the parentheses expression as a first try
        newTitle = `${baseTitle} (${counter})`;
    }

    console.log(`first check of newTitle: ${newTitle}`);

    while (await existsCallback(newTitle)) {
        counter++;
        newTitle = `${baseTitle} (${counter})`;
        console.log(`trying newTitle: ${newTitle}`);
    }

    return newTitle;
}

module.exports = {
    generateUniqueTitle
};
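An illustration of the renaming behaviour (not in the commit; the Set-based callback stands in for the `findOne` lookups the models pass in):

// Illustration only.
const { generateUniqueTitle } = require('./utils');

const taken = new Set(['Examen', 'Examen (1)', 'Examen (2)']);

generateUniqueTitle('Examen', async (title) => taken.has(title))
    .then(console.log); // -> "Examen (3)"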
2707
server/package-lock.json
generated
File diff suppressed because it is too large
server/package.json
@@ -6,22 +6,25 @@
  "scripts": {
    "build": "webpack --config webpack.config.js",
    "start": "node app.js",
    "dev": "nodemon app.js",
    "test": "jest",
    "postinstall": "patch-package"
    "postinstall": "patch-package",
    "dev": "cross-env NODE_ENV=development nodemon app.js",
    "test": "jest --colors"
  },
  "keywords": [],
  "author": "",
  "license": "MIT",
  "dependencies": {
    "@types/express": "^5.0.0",
    "bcrypt": "^5.1.1",
    "cors": "^2.8.5",
    "dockerode": "^4.0.2",
    "dotenv": "^16.4.4",
    "express": "^4.18.2",
    "express-session": "^1.18.0",
    "jsonwebtoken": "^9.0.2",
    "mongodb": "^6.3.0",
    "multer": "^1.4.5-lts.1",
    "net": "^1.0.2",
    "nodemailer": "^6.9.9",
    "passport": "^0.7.0",
    "passport-oauth2": "^1.8.0",

@@ -31,7 +34,10 @@
    "socket.io-client": "^4.7.2"
  },
  "devDependencies": {
    "@types/node": "^22.8.4",
    "cross-env": "^7.0.3",
    "jest": "^29.7.0",
    "jest-mock": "^29.7.0",
    "nodemon": "^3.0.1",
    "supertest": "^6.3.4"
  },
75
server/roomsProviders/base-provider.js
Normal file
@@ -0,0 +1,75 @@
/**
 * @template T
 * @typedef {import('../../types/room').RoomInfo} RoomInfo
 * @typedef {import('../../types/room').RoomOptions} RoomOptions
 * @typedef {import('../../types/room').BaseProviderConfig} BaseProviderConfig
 */

const MIN_NB_SECONDS_BEFORE_CLEANUP = process.env.MIN_NB_SECONDS_BEFORE_CLEANUP || 60;

class BaseRoomProvider {
    constructor(config = {}, roomRepository) {
        this.config = config;
        this.roomRepository = roomRepository;

        this.quiz_docker_image = process.env.QUIZROOM_IMAGE || "evaluetonsavoir-quizroom";
        this.quiz_docker_port = process.env.QUIZROOM_PORT || 4500;
        this.quiz_expose_port = process.env.QUIZROOM_EXPOSE_PORT || false;
    }

    async createRoom(roomId, options) {
        throw new Error("Fonction non-implantée - classe abstraite");
    }

    async deleteRoom(roomId) {
        throw new Error("Fonction non-implantée - classe abstraite");
    }

    async getRoomStatus(roomId) {
        throw new Error("Fonction non-implantée - classe abstraite");
    }

    async listRooms() {
        throw new Error("Fonction non-implantée - classe abstraite");
    }

    async cleanup() {
        throw new Error("Fonction non-implantée - classe abstraite");
    }

    async syncInstantiatedRooms() {
        throw new Error("Fonction non-implantée - classe abstraite");
    }

    async updateRoomsInfo() {
        const rooms = await this.roomRepository.getAll();
        for (var room of rooms) {
            const url = `${room.host}/health`;
            try {
                const response = await fetch(url);

                if (!response.ok) {
                    room.mustBeCleaned = true;
                    await this.roomRepository.update(room);
                    continue;
                }

                const json = await response.json();
                room.nbStudents = json.connections;
                room.mustBeCleaned = room.nbStudents === 0 && json.uptime > MIN_NB_SECONDS_BEFORE_CLEANUP;

                await this.roomRepository.update(room);
            } catch (error) {
                room.mustBeCleaned = true;
                await this.roomRepository.update(room);
            }
        }
    }

    async getRoomInfo(roomId) {
        const info = await this.roomRepository.get(roomId);
        return info;
    }
}

module.exports = BaseRoomProvider;
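For context, a hypothetical concrete provider built on this base class (not part of the commit); note that `updateRoomsInfo()` appears to assume each quiz-room container answers `GET <host>/health` with JSON shaped like `{ "connections": <number>, "uptime": <seconds> }`:

// Hypothetical in-memory provider sketch; a real provider (docker, kubernetes)
// would start and stop actual containers here.
const BaseRoomProvider = require('./base-provider.js');

class InMemoryRoomProvider extends BaseRoomProvider {
    constructor(config, roomRepository) {
        super(config, roomRepository);
        this.localRooms = new Map();
    }

    async createRoom(roomId, options) {
        const room = { id: roomId, name: options?.name, host: `http://localhost:${this.quiz_docker_port}`, nbStudents: 0 };
        this.localRooms.set(roomId, room);
        return await this.roomRepository.create(room);
    }

    async deleteRoom(roomId) {
        this.localRooms.delete(roomId);
        return await this.roomRepository.delete(roomId);
    }

    async getRoomStatus(roomId) { return this.localRooms.get(roomId) || null; }
    async listRooms() { return await this.roomRepository.getAll(); }
    async syncInstantiatedRooms() { /* reconcile the repository with what is actually running */ }

    async cleanup() {
        // Remove rooms flagged by updateRoomsInfo() as empty or unreachable.
        for (const room of await this.roomRepository.getAll()) {
            if (room.mustBeCleaned) await this.deleteRoom(room.id);
        }
    }
}

module.exports = InMemoryRoomProvider;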
Some files were not shown because too many files have changed in this diff