Compare commits

...

3 Commits

SHA1        Date
e0f3e5f738  2025-12-04 10:52:20 +01:00
1d41518f24  2025-12-04 10:45:53 +01:00
f2a051576d  2025-12-04 10:37:05 +01:00
28 changed files with 356 additions and 1025 deletions

View File

@ -17,9 +17,20 @@ services:
- "3000:3000"
volumes:
- grafana-data:/var/lib/grafana
- ./observability/grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards:ro
- ./observability/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
- ./observability/grafana/dashboards:/var/lib/grafana/dashboards:ro
environment:
- GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=admin
- GF_SMTP_ENABLED=true
- GF_SMTP_HOST=smtp.gmail.com:587
- GF_SMTP_USER=kimraumilliardaire@gmail.com
- GF_SMTP_PASSWORD=vmfc xrtt yvvm gylz
- GF_SMTP_FROM_ADDRESS=kimraumilliardaire@gmail.com
- GF_SMTP_FROM_NAME=Grafana Alerts
- GF_SMTP_SKIP_VERIFY=true # useful if the container has TLS certificate issues
# volumes, depends_on, etc.
networks:
- observability
depends_on:

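Note that this commits a Gmail app password in plaintext. A minimal sketch of keeping it out of the repo, assuming a hypothetical .env file next to the compose file:

# .env (git-ignored; hypothetical)
GRAFANA_SMTP_PASSWORD=changeme
# in docker-compose.yml, reference the variable instead of the literal:
#   - GF_SMTP_PASSWORD=${GRAFANA_SMTP_PASSWORD}
docker compose --env-file .env up -d grafana
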
View File

@ -0,0 +1,40 @@
{
"title": "App Runtime Overview (Dev)",
"uid": "dev-app-runtime-overview",
"tags": ["dev","application"],
"time": { "from": "now-1h", "to": "now" },
"schemaVersion": 42,
"panels": [
{
"type": "timeseries",
"title": "Requests per Second (RPS)",
"gridPos": {"x":0,"y":0,"w":12,"h":8},
"datasource": "prometheus",
"targets": [
{ "refId": "A", "expr": "sum(rate(http_requests_total[2m]))" }
]
},
{
"type": "timeseries",
"title": "Error Rate (%)",
"gridPos": {"x":12,"y":0,"w":12,"h":8},
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "percent" }, "overrides": [] },
"targets": [
{ "refId": "A", "expr": "sum(rate(http_requests_total{status=~\"5..\"}[2m])) / sum(rate(http_requests_total[2m])) * 100" }
]
},
{
"type": "timeseries",
"title": "Latency P95 (s)",
"gridPos": {"x":0,"y":8,"w":24,"h":8},
"datasource": "prometheus",
"targets": [
{
"refId": "A",
"expr": "histogram_quantile(0.95, sum by (le) (rate(http_request_duration_seconds_bucket[2m])))"
}
]
}
]
}
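
The three panels above assume the application actually exports http_requests_total and http_request_duration_seconds_bucket. A quick sanity check of the RPS query against the Prometheus HTTP API (assuming Prometheus is published on localhost:9090 and jq is installed):

curl -s 'http://localhost:9090/api/v1/query' \
  --data-urlencode 'query=sum(rate(http_requests_total[2m]))' | jq '.data.result'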

View File

@ -0,0 +1,58 @@
{
"title": "Logs by Service (Dev)",
"uid": "dev-logs-by-service",
"tags": ["dev","logs","loki"],
"time": { "from": "now-1h", "to": "now" },
"schemaVersion": 42,
"templating": {
"list": [
{
"name": "service",
"label": "Service (label app)",
"type": "query",
"datasource": "loki",
"query": "label_values({app!=\"\"}, app)",
"includeAll": true,
"multi": true,
"refresh": 2,
"current": {}
}
]
},
"panels": [
{
"type": "logs",
"title": "Logs $service",
"gridPos": { "x": 0, "y": 0, "w": 24, "h": 12 },
"datasource": "loki",
"options": {
"showLabels": true,
"showTime": true,
"wrapLogMessage": true,
"prettifyLogMessage": true
},
"targets": [
{
"refId": "A",
"expr": "{app=~\"$service\"}"
}
]
},
{
"type": "timeseries",
"title": "Error rate (logs/min) $service",
"gridPos": { "x": 0, "y": 12, "w": 24, "h": 8 },
"datasource": "loki",
"fieldConfig": { "defaults": { "unit": "ops" }, "overrides": [] },
"options": {
"legend": { "showLegend": true, "placement": "bottom" }
},
"targets": [
{
"refId": "A",
"expr": "sum by (app)(rate({app=~\"$service\"} |~ \"(?i)(error|exception|fail|timeout)\"[5m])) * 60"
}
]
}
]
}
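
To confirm the $service variable and the error-rate query will have data, the same LogQL can be tested against Loki's query_range endpoint (assuming Loki is published on localhost:3100; start/end default to the last hour):

curl -s -G 'http://localhost:3100/loki/api/v1/query_range' \
  --data-urlencode 'query=sum by (app)(rate({app=~".+"} |~ "(?i)(error|exception|fail|timeout)" [5m]))' | jq '.data.result'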

View File

@ -0,0 +1,49 @@
{
"title": "Infra Overview (Ops)",
"uid": "ops-infra-overview",
"tags": ["ops","infrastructure"],
"time": { "from": "now-1h", "to": "now" },
"schemaVersion": 42,
"panels": [
{
"type": "timeseries",
"title": "CPU Utilization (%)",
"gridPos": {"x":0,"y":0,"w":12,"h":8},
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "percent" }, "overrides": [] },
"targets": [
{ "refId": "A", "expr": "100 - (avg by(instance)(rate(node_cpu_seconds_total{mode=\"idle\"}[2m]))*100)" }
]
},
{
"type": "timeseries",
"title": "Memory Utilization (%)",
"gridPos": {"x":12,"y":0,"w":12,"h":8},
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "percent" }, "overrides": [] },
"targets": [
{ "refId": "A", "expr": "((node_memory_MemTotal_bytes - node_memory_MemAvailable_bytes) / node_memory_MemTotal_bytes) * 100" }
]
},
{
"type": "timeseries",
"title": "Disk Space Used (bytes)",
"gridPos": {"x":0,"y":8,"w":12,"h":8},
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "bytes" }, "overrides": [] },
"targets": [
{ "refId": "A", "expr": "node_filesystem_size_bytes{fstype!~\"tmpfs|overlay\"} - node_filesystem_avail_bytes{fstype!~\"tmpfs|overlay\"}" }
]
},
{
"type": "timeseries",
"title": "Host Power (Watts) - Scaphandre",
"gridPos": {"x":12,"y":8,"w":12,"h":8},
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "watt" }, "overrides": [] },
"targets": [
{ "refId": "A", "expr": "scaph_host_power_microwatts / 1e6" }
]
}
]
}

View File

@ -0,0 +1,86 @@
{
"title": "Prometheus Health (Ops)",
"uid": "ops-prom-health",
"tags": ["ops","prometheus","health"],
"time": { "from": "now-1h", "to": "now" },
"schemaVersion": 42,
"panels": [
{
"type": "stat",
"title": "Targets DOWN (total)",
"gridPos": { "x": 0, "y": 0, "w": 6, "h": 4 },
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "none" }, "overrides": [] },
"options": { "reduceOptions": { "calcs": ["lastNotNull"], "fields": "" } },
"targets": [
{ "refId": "A", "expr": "sum(1 - up)" }
]
},
{
"type": "stat",
"title": "Alerts firing",
"gridPos": { "x": 6, "y": 0, "w": 6, "h": 4 },
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "none" }, "overrides": [] },
"options": { "reduceOptions": { "calcs": ["lastNotNull"], "fields": "" } },
"targets": [
{ "refId": "A", "expr": "count(ALERTS{alertstate=\"firing\"})" }
]
},
{
"type": "stat",
"title": "Ingest rate (samples/s)",
"gridPos": { "x": 12, "y": 0, "w": 12, "h": 4 },
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "ops" }, "overrides": [] },
"options": { "reduceOptions": { "calcs": ["lastNotNull"] } },
"targets": [
{ "refId": "A", "expr": "rate(prometheus_tsdb_head_samples_appended_total[5m])" }
]
},
{
"type": "timeseries",
"title": "Targets UP by job",
"gridPos": { "x": 0, "y": 4, "w": 12, "h": 8 },
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "none" }, "overrides": [] },
"options": { "legend": { "showLegend": true, "placement": "bottom" } },
"targets": [
{ "refId": "A", "expr": "sum by(job)(up)" }
]
},
{
"type": "timeseries",
"title": "Scrape duration (s) by job",
"gridPos": { "x": 12, "y": 4, "w": 12, "h": 8 },
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "s" }, "overrides": [] },
"options": { "legend": { "showLegend": true, "placement": "bottom" } },
"targets": [
{ "refId": "A", "expr": "avg by(job)(scrape_duration_seconds)" }
]
},
{
"type": "timeseries",
"title": "Rule group eval duration (s)",
"gridPos": { "x": 0, "y": 12, "w": 12, "h": 8 },
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "s" }, "overrides": [] },
"options": { "legend": { "showLegend": true, "placement": "bottom" } },
"targets": [
{ "refId": "A", "expr": "max by(rule_group) (prometheus_rule_group_last_duration_seconds)" }
]
},
{
"type": "timeseries",
"title": "TSDB head chunks",
"gridPos": { "x": 12, "y": 12, "w": 12, "h": 8 },
"datasource": "prometheus",
"fieldConfig": { "defaults": { "unit": "short" }, "overrides": [] },
"options": { "legend": { "showLegend": true, "placement": "bottom" } },
"targets": [
{ "refId": "A", "expr": "prometheus_tsdb_head_chunks" }
]
}
]
}

View File

@ -0,0 +1,65 @@
{
"title": "Logs Ops (Errors & System)",
"uid": "ops-logs-errors",
"tags": ["ops","logs","loki"],
"time": { "from": "now-2h", "to": "now" },
"schemaVersion": 42,
"templating": {
"list": [
{
"name": "job",
"label": "Job",
"type": "query",
"datasource": "loki",
"query": "label_values(job)",
"includeAll": true,
"multi": true,
"refresh": 2
},
{
"name": "instance",
"label": "Instance",
"type": "query",
"datasource": "loki",
"query": "label_values({job=~\"$job\"}, instance)",
"includeAll": true,
"multi": true,
"refresh": 2
}
]
},
"panels": [
{
"type": "timeseries",
"title": "Error rate by instance (logs/min)",
"gridPos": { "x": 0, "y": 0, "w": 24, "h": 8 },
"datasource": "loki",
"fieldConfig": { "defaults": { "unit": "ops" }, "overrides": [] },
"options": { "legend": { "showLegend": true, "placement": "bottom" } },
"targets": [
{
"refId": "A",
"expr": "sum by (instance)(rate({job=~\"$job\", instance=~\"$instance\"} |~ \"(?i)(critical|error|err)\"[5m])) * 60"
}
]
},
{
"type": "logs",
"title": "Recent critical & errors $job / $instance",
"gridPos": { "x": 0, "y": 8, "w": 24, "h": 14 },
"datasource": "loki",
"options": {
"showLabels": true,
"showTime": true,
"wrapLogMessage": true,
"prettifyLogMessage": true
},
"targets": [
{
"refId": "A",
"expr": "{job=~\"$job\", instance=~\"$instance\"} |~ \"(?i)(critical|error|err|panic|oom)\""
}
]
}
]
}

View File

@ -0,0 +1,7 @@
apiVersion: 1
folders:
- uid: ops-folder
title: "Ops Infrastructure & Platform"
- uid: dev-folder
title: "Dev Application & Quality"

View File

@ -0,0 +1,12 @@
apiVersion: 1
providers:
- name: "dev-dashboards"
orgId: 1
folderUid: "dev-folder"
type: file
disableDeletion: false
editable: true
updateIntervalSeconds: 30
options:
path: /var/lib/grafana/dashboards/dev
foldersFromFilesStructure: true

View File

@ -0,0 +1,12 @@
apiVersion: 1
providers:
- name: "ops-dashboards"
orgId: 1
folderUid: "ops-folder"
type: file
disableDeletion: false
editable: true
updateIntervalSeconds: 30
options:
path: /var/lib/grafana/dashboards/ops
foldersFromFilesStructure: true
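
Once Grafana restarts, a quick way to check that both providers picked up the dashboards is the search API (assuming Grafana is published on localhost:3000 with the admin/admin credentials from the compose file):

curl -s -u admin:admin 'http://localhost:3000/api/search?type=dash-db' | jq '.[].title'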

View File

@ -0,0 +1,8 @@
apiVersion: 1
datasources:
- name: prometheus
type: prometheus
access: proxy
url: http://prometheus:9090
isDefault: true
editable: true
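
The dashboards above also reference a datasource named loki, which this file does not provision. A minimal sketch of a companion file, assuming the Loki container is reachable as http://loki:3100 (its compose service name):

sudo tee observability/grafana/provisioning/datasources/loki.yml > /dev/null <<EOF
apiVersion: 1
datasources:
  - name: loki
    type: loki
    access: proxy
    url: http://loki:3100
    editable: true
EOF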

View File

@ -8,11 +8,11 @@ scrape_configs:
- job_name: 'observabilite'
static_configs:
- targets: ['192.168.56.16:9100'] # IP of the observabilite machine
- targets: ['192.168.4.4:9100'] # IP of the observabilite machine
- job_name: 'scaphandre'
static_configs:
- targets: ['192.168.56.16:8080']
- targets: ['192.168.4.4:8080']
fallback_scrape_protocol: "PrometheusText1.0.0"
- job_name: 'apache_vmservices'
@ -22,3 +22,9 @@ scrape_configs:
- job_name: 'vms'
static_configs:
- targets: ['192.168.56.18:9100'] # IP of vmHardware
- targets: ['192.168.56.17:9100'] # IP of vmServices
- targets: ['192.168.56.15:9100'] # IP of vmApplicatifs
- job_name: 'tomcat'
static_configs:
- targets: ['192.168.56.15:9082'] # IP of vmApplicatif
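
Before reloading Prometheus with these new targets, the config can be validated with promtool, which ships in the Prometheus image (assuming the container is still named prometheus-observability):

docker exec prometheus-observability promtool check config /etc/prometheus/prometheus.yml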

View File

@ -1,113 +0,0 @@
1. Preparing the Applicatif VM (192.168.56.30)
1.1 System update
sudo apt update && sudo apt upgrade -y
1.2 Installing Java and Maven
sudo apt install -y openjdk-17-jdk maven
java -version
mvn -version
1.3 Installing Tomcat
cd /opt
sudo wget https://dlcdn.apache.org/tomcat/tomcat-10/v10.1.47/bin/apache-tomcat-10.1.47.tar.gz
sudo tar -xvzf apache-tomcat-10.1.47.tar.gz
sudo mv apache-tomcat-10.1.47 tomcat
sudo chown -R vagrant:vagrant /opt/tomcat
nano ~/.bashrc
export CATALINA_HOME=/opt/tomcat
export PATH=$PATH:$CATALINA_HOME/bin
source ~/.bashrc
sudo nano /etc/systemd/system/tomcat.service
------------------
[Unit]
Description=Apache Tomcat 10.1.47
After=network.target
[Service]
Type=forking
Environment=JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64
Environment=CATALINA_HOME=/opt/tomcat
ExecStart=/opt/tomcat/bin/startup.sh
ExecStop=/opt/tomcat/bin/shutdown.sh
User=vagrant
Group=vagrant
RestartSec=10
Restart=always
[Install]
WantedBy=multi-user.target
----------------------------------------------
sudo systemctl daemon-reload
sudo systemctl enable tomcat
sudo systemctl start tomcat
sudo systemctl status tomcat
1.4 Deploying the Java application
Copy the .war file from the host PC: # adapt this to your setup
scp target/archiweb-api-1.0.0.war vagrant@192.168.56.31:/tmp/
sudo cp /tmp/archiweb-api-1.0.0.war /opt/tomcat/webapps/
sudo mv /opt/tomcat/webapps/archiweb-api-1.0.0.war /opt/tomcat/webapps/api.war
2. PostgreSQL database (hosted on the VM)
2.1 Installation
sudo apt update && sudo apt upgrade -y
sudo apt install postgresql postgresql-contrib -y
sudo systemctl status postgresql   # optional
Check postgresql.conf to make sure that # listen_addresses = 'localhost' is set as expected.
The command to do so: sudo nano /etc/postgresql/15/main/postgresql.conf
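A quick way to see the current value without opening the editor (path assumes the PostgreSQL 15 distro package):

grep -n '^#\?listen_addresses' /etc/postgresql/15/main/postgresql.conf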
2.2 Creating the database
sudo apt update
sudo apt upgrade -y
sudo apt install -y wget gnupg lsb-release
# Add the official PostgreSQL repository
echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | sudo tee /etc/apt/sources.list.d/pgdg.list
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt update
# Install the PostgreSQL server
sudo apt install -y postgresql-18
sudo -u postgres psql <<EOF
CREATE DATABASE archiweb_db;
CREATE USER archiweb_user WITH ENCRYPTED PASSWORD 'archiweb_pass';
GRANT ALL PRIVILEGES ON DATABASE archiweb_db TO archiweb_user;
\q
EOF
sudo nano /etc/postgresql/18/main/pg_hba.conf
local all all md5
sudo systemctl restart postgresql
sudo systemctl status postgresql
sudo -u postgres psql
DROP DATABASE IF EXISTS archiweb_db;
CREATE DATABASE archiweb_db OWNER archiweb_user;
\q
psql -U archiweb_user -d archiweb_db -f /vagrant/database/schema.sql
psql -U archiweb_user -d archiweb_db -f /vagrant/database/data.sql
Password: (archiweb_pass)
http://192.168.56.31:8080/api/utilisateurs
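
Smoke test from the host once Tomcat has deployed the WAR:

curl -s http://192.168.56.31:8080/api/utilisateurs | head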

View File

@ -1,98 +0,0 @@
sudo mkdir -p /opt/promtail
cd /opt/promtail
sudo wget https://github.com/grafana/loki/releases/download/v3.1.1/promtail-linux-amd64.zip
sudo apt install -y unzip # if needed
sudo unzip promtail-linux-amd64.zip
sudo mv promtail-linux-amd64 promtail
sudo chmod +x promtail
sudo useradd -r -s /bin/false promtail || true
sudo chown -R promtail:promtail /opt/promtail
sudo mkdir -p /etc/promtail
cd /etc/promtail
sudo nano config-promtail.yaml
-------------
server:
http_listen_port: 9080
grpc_listen_port: 0
positions:
filename: /tmp/positions.yaml
clients:
- url: http://192.168.56.30:3100/loki/api/v1/push
scrape_configs:
- job_name: tomcat
static_configs:
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: catalina
__path__: /opt/tomcat/logs/catalina*.log
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: localhost
__path__: /opt/tomcat/logs/localhost*.log
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: access
__path__: /opt/tomcat/logs/localhost_access_log*.txt
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: out
__path__: /opt/tomcat/logs/catalina.out
---------------------------
nano /etc/systemd/system/promtail.service
[Unit]
Description=Promtail service
After=network.target
[Service]
User=promtail
ExecStart=/opt/promtail/promtail -config.file=/etc/promtail/config-promtail.yaml
Restart=always
[Install]
WantedBy=multi-user.target
--------------------------
sudo systemctl daemon-reload
sudo systemctl enable --now promtail
sudo systemctl start promtail
sudo systemctl status promtail
sudo ls -ld /opt/tomcat/logs
sudo ls -l /opt/tomcat/logs | head
# Give tomcat full ownership of the logs
sudo chown -R tomcat:tomcat /opt/tomcat/logs
# Restrict access to the log directory
sudo chmod 750 /opt/tomcat/logs
# Standard read/write permissions on the log files
sudo chmod 640 /opt/tomcat/logs/*
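With 640 files owned by tomcat:tomcat, the promtail user can only read these logs if it belongs to the tomcat group (assuming such a group exists on this VM). A quick check:

sudo usermod -aG tomcat promtail
sudo systemctl restart promtail
sudo -u promtail head -n 1 /opt/tomcat/logs/catalina.out   # a log line, not "Permission denied"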

View File

@ -1,111 +0,0 @@
-- Add roles
INSERT INTO role (nom)
VALUES ('Administrateur'),
('Enseignant'),
('Étudiant');
-- Add users
INSERT INTO utilisateur (login, mot_de_passe, nom, prenom, email)
VALUES ('jlpicar', 'password', 'Picard', 'Jean-Luc', 'jean-luc.picard@starfleet.com'),
('kjaneway', 'password', 'Janeway', 'Kathryn', 'kathryn.janeway@starfleet.com'),
('spock', 'password', '', 'Spock', 'spock@starfleet.com'),
('wriker', 'password', 'Riker', 'William', 'william.riker@starfleet.com'),
('data', 'password', '', 'Data', 'data@starfleet.com'),
('glaforge', 'password', 'LaForge', 'Geordi', 'geordi.laforge@starfleet.com'),
('tuvok', 'password', 'Tuvok', '', 'tuvok@starfleet.com'),
('wcrusher', 'password', 'Crusher', 'Wesley', 'wesley.crusher@starfleet.com'),
('tparis', 'password', 'Paris', 'Tom', 'tom.paris@starfleet.com'),
('hkim', 'password', 'Kim', 'Harry', 'harry.kim@starfleet.com'),
('jsisko', 'password', 'Sisko', 'Jake', 'jake.sisko@starfleet.com'),
('nog', 'password', '', 'Nog', 'nog@starfleet.com'),
('bmariner', 'password', 'Mariner', 'Beckett', 'beckett.mariner@starfleet.com'),
('bboimler', 'password', 'Boimler', 'Brad', 'brad.boimler@starfleet.com'),
('dvtendi', 'password', 'Tendi', 'D''Vana', 'd-vana.tendi@starfleet.com'),
('srutherf', 'password', 'Rutherford', 'Sam', 'sam.rutherford@starfleet.com'),
('drel', 'password', 'R''El', 'Dal', 'dal.r-el@starfleet.com'),
('gwyndala', 'password', '', 'Gwyndala', 'gwyndala@starfleet.com'),
('roktahk', 'password', '', 'Rok-Tahk', 'rok-takh@starfleet.com'),
('zero', 'password', 'Zero', '', 'zero@starfleet.com'),
('jpog', 'password', 'Pog', 'Jankom', 'jankom.pog@starfleet.com'),
('murf', 'password', '', 'Murf', 'murf@starfleet.com');
-- Assign roles to users
INSERT INTO utilisateur_role (utilisateur_id, role_id)
VALUES ((select id from utilisateur where login = 'jlpicar'), 1), -- Picard as Administrator
((select id from utilisateur where login = 'kjaneway'), 1), -- Janeway as Administrator
((select id from utilisateur where login = 'spock'), 2), -- Spock as Teacher
((select id from utilisateur where login = 'wriker'), 2), -- Riker as Teacher
((select id from utilisateur where login = 'data'), 2), -- Data as Teacher
((select id from utilisateur where login = 'glaforge'), 2), -- LaForge as Teacher
((select id from utilisateur where login = 'tuvok'), 2), -- Tuvok as Teacher
((select id from utilisateur where login = 'wcrusher'), 3), -- Crusher as Student
((select id from utilisateur where login = 'tparis'), 3), -- Paris as Student
((select id from utilisateur where login = 'hkim'), 3), -- Kim as Student
((select id from utilisateur where login = 'jsisko'), 3), -- Sisko as Student
((select id from utilisateur where login = 'nog'), 3), -- Nog as Student
((select id from utilisateur where login = 'bmariner'), 3), -- Mariner as Student
((select id from utilisateur where login = 'bboimler'), 3), -- Boimler as Student
((select id from utilisateur where login = 'dvtendi'), 3), -- Tendi as Student
((select id from utilisateur where login = 'srutherf'), 3), -- Rutherford as Student
((select id from utilisateur where login = 'drel'), 3), -- R'El as Student
((select id from utilisateur where login = 'gwyndala'), 3), -- Gwyndala as Student
((select id from utilisateur where login = 'roktahk'), 3), -- Rok-Tahk as Student
((select id from utilisateur where login = 'zero'), 3), -- Zero as Student
((select id from utilisateur where login = 'jpog'), 3), -- Pog as Student
((select id from utilisateur where login = 'murf'), 3); -- Murf as Student
-- Add courses
INSERT INTO cours (nom, date_debut)
VALUES ('Starship Command', '2023-01-01'),
('Warp Theory', '2023-02-01'),
('Federation Ethics', '2023-03-01');
-- Add teaching assignments
INSERT INTO enseignement (utilisateur_id, cours_id)
VALUES ((select id from utilisateur where login = 'spock'), 1), -- Spock teaches Starship Command
((select id from utilisateur where login = 'wriker'), 1), -- Riker teaches Starship Command
((select id from utilisateur where login = 'data'), 2), -- Data teaches Warp Theory
((select id from utilisateur where login = 'glaforge'), 2), -- LaForge teaches Warp Theory
((select id from utilisateur where login = 'tuvok'), 3); -- Tuvok teaches Federation Ethics
-- Add enrollments
INSERT INTO inscription (utilisateur_id, cours_id)
VALUES ((select id from utilisateur where login = 'wcrusher'), 1), -- Crusher enrolls in Starship Command
((select id from utilisateur where login = 'wcrusher'), 2), -- Crusher enrolls in Warp Theory
((select id from utilisateur where login = 'tparis'), 1), -- Paris enrolls in Starship Command
((select id from utilisateur where login = 'hkim'), 1), -- Kim enrolls in Starship Command
((select id from utilisateur where login = 'jsisko'), 3), -- Sisko enrolls in Federation Ethics
((select id from utilisateur where login = 'nog'), 1), -- Nog enrolls in Starship Command
((select id from utilisateur where login = 'nog'), 3), -- Nog enrolls in Federation Ethics
((select id from utilisateur where login = 'bboimler'), 1), -- Boimler enrolls in Starship Command
((select id from utilisateur where login = 'dvtendi'), 2), -- Tendi enrolls in Warp Theory
((select id from utilisateur where login = 'srutherf'), 2), -- Rutherford enrolls in Warp Theory
((select id from utilisateur where login = 'drel'), 1), -- R'El enrolls in Starship Command
((select id from utilisateur where login = 'gwyndala'), 1), -- Gwyndala enrolls in Starship Command
((select id from utilisateur where login = 'gwyndala'), 3), -- Gwyndala enrolls in Federation Ethics
((select id from utilisateur where login = 'roktahk'), 2), -- Rok-Tahk enrolls in Warp Theory
((select id from utilisateur where login = 'zero'), 3), -- Zero enrolls in Federation Ethics
((select id from utilisateur where login = 'jpog'), 2); -- Pog enrolls in Warp Theory
-- Add course contents
INSERT INTO contenu_cours (cours_id, titre, description)
VALUES (1, 'Bridge Operations', 'Learn how to manage starship operations.'),
(1, 'Tactical Maneuvers', 'Advanced starship tactics.'),
(1, 'Command Decision Making', 'Develop leadership skills for commanding a starship.'),
(2, 'Warp Core Mechanics', 'Understand the inner workings of warp cores.'),
(2, 'Dilithium Crystals', 'Study the properties of dilithium crystals.'),
(2, 'Warp Field Theory', 'Explore the physics behind warp travel.'),
(3, 'Prime Directive', 'Ethical dilemmas in space exploration.'),
(3, 'Cultural Sensitivity', 'Learn how to interact with alien civilizations.'),
(3, 'Federation Law', 'Understand the legal framework of the Federation.');
-- Add assignments
INSERT INTO devoir (cours_id, titre, description, date_debut, date_fin)
VALUES (1, 'Bridge Simulation', 'Participate in a simulated bridge operation.', '2023-01-15', '2023-01-30'),
(1, 'Tactical Exercise', 'Plan and execute tactical maneuvers.', '2023-01-20', '2023-02-05'),
(2, 'Warp Core Analysis', 'Analyze the efficiency of a warp core.', '2023-02-10', '2023-02-25'),
(2, 'Crystal Experiment', 'Experiment with dilithium crystals.', '2023-02-15', '2023-03-01'),
(3, 'Prime Directive Case Study', 'Discuss ethical dilemmas.', '2023-03-10', '2023-03-25');

View File

@ -1,136 +0,0 @@
-- Drop tables if they exist
DROP TABLE IF EXISTS note;
DROP TABLE IF EXISTS rendu_devoir;
DROP TABLE IF EXISTS devoir;
DROP TABLE IF EXISTS contenu_cours;
DROP TABLE IF EXISTS inscription;
DROP TABLE IF EXISTS enseignement;
DROP TABLE IF EXISTS cours;
DROP TABLE IF EXISTS utilisateur_role;
DROP TABLE IF EXISTS role;
DROP TABLE IF EXISTS utilisateur;
-- Table utilisateur: represents an application user
CREATE TABLE utilisateur
(
id BIGSERIAL NOT NULL,
login VARCHAR(8) NOT NULL,
mot_de_passe VARCHAR(255) NOT NULL,
nom VARCHAR(100),
prenom VARCHAR(100),
email VARCHAR(255),
PRIMARY KEY (id),
CONSTRAINT ux_u_login UNIQUE (login),
CONSTRAINT ux_u_email UNIQUE (email)
);
CREATE INDEX ix_u_login ON utilisateur (login);
-- Table role: represents a user role
CREATE TABLE role
(
id BIGSERIAL NOT NULL,
nom VARCHAR(100) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT ux_r_nom UNIQUE (nom)
);
CREATE INDEX ix_r_nom ON role (nom);
-- Table utilisateur_role: association between users and roles
CREATE TABLE utilisateur_role
(
utilisateur_id BIGINT NOT NULL,
role_id BIGINT NOT NULL,
PRIMARY KEY (utilisateur_id, role_id),
CONSTRAINT fk_ur_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_ur_role_id FOREIGN KEY (role_id) REFERENCES role (id) ON DELETE CASCADE
);
CREATE INDEX ix_ur_utilisateur_id ON utilisateur_role (utilisateur_id);
CREATE INDEX ix_ur_role_id ON utilisateur_role (role_id);
-- Table cours: represents a course
CREATE TABLE cours
(
id BIGSERIAL NOT NULL,
nom VARCHAR(100) NOT NULL,
date_debut DATE NOT NULL,
PRIMARY KEY (id)
);
CREATE INDEX ix_c_nom ON cours (nom);
-- Table enseignement: association between users and courses
CREATE TABLE enseignement
(
utilisateur_id BIGINT NOT NULL,
cours_id BIGINT NOT NULL,
PRIMARY KEY (utilisateur_id, cours_id),
CONSTRAINT fk_e_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_e_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_e_utilisateur_id ON enseignement (utilisateur_id);
CREATE INDEX ix_e_cours_id ON enseignement (cours_id);
-- Table inscription: association between users and courses
CREATE TABLE inscription
(
utilisateur_id BIGINT NOT NULL,
cours_id BIGINT NOT NULL,
date_inscription TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (utilisateur_id, cours_id),
CONSTRAINT fk_i_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_i_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_i_utilisateur_id ON inscription (utilisateur_id);
CREATE INDEX ix_i_cours_id ON inscription (cours_id);
-- Table contenu_cours: represents a course's content
CREATE TABLE contenu_cours
(
id BIGSERIAL NOT NULL,
cours_id BIGINT NOT NULL,
titre VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY (id),
CONSTRAINT fk_cc_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_cc_cours_id ON contenu_cours (cours_id);
-- Table devoir: represents an assignment attached to a course
CREATE TABLE devoir
(
id BIGSERIAL NOT NULL,
cours_id BIGINT NOT NULL,
titre VARCHAR(255) NOT NULL,
description TEXT,
date_debut TIMESTAMP NOT NULL,
date_fin TIMESTAMP NOT NULL,
PRIMARY KEY (id),
CONSTRAINT fk_d_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_d_cours_id ON devoir (cours_id);
-- Table rendu_devoir: represents a user's submission of an assignment
CREATE TABLE rendu_devoir
(
id BIGSERIAL NOT NULL,
devoir_id BIGINT NOT NULL,
utilisateur_id BIGINT NOT NULL,
date_rendu TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
contenu TEXT,
PRIMARY KEY (id),
CONSTRAINT fk_rd_devoir_id FOREIGN KEY (devoir_id) REFERENCES devoir (id) ON DELETE CASCADE,
CONSTRAINT fk_rd_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE
);
CREATE INDEX ix_rd_devoir_id ON rendu_devoir (devoir_id);
CREATE INDEX ix_rd_utilisateur_id ON rendu_devoir (utilisateur_id);
-- Table note: represents a grade given to an assignment submission
CREATE TABLE note
(
id BIGSERIAL NOT NULL,
rendu_devoir_id BIGINT NOT NULL,
valeur DECIMAL(5, 2) NOT NULL CHECK (valeur >= 0 AND valeur <= 20),
date_attribution TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
CONSTRAINT fk_n_rendu_devoir_id FOREIGN KEY (rendu_devoir_id) REFERENCES rendu_devoir (id) ON DELETE CASCADE
);
CREATE INDEX ix_n_rendu_devoir_id ON note (rendu_devoir_id);

View File

@ -1,38 +0,0 @@
auth_enabled: false
server:
http_listen_port: 3100
ingester:
lifecycler:
address: 127.0.0.1
ring:
kvstore:
store: inmemory
replication_factor: 1
chunk_idle_period: 5m
chunk_target_size: 1048576
max_transfer_retries: 0
schema_config:
configs:
- from: 2020-10-24
store: boltdb
object_store: filesystem
schema: v11
index:
prefix: index_
period: 24h
storage_config:
boltdb:
directory: /loki/index
filesystem:
directory: /loki/chunks
limits_config:
reject_old_samples: true
reject_old_samples_max_age: 168h
chunk_store_config:
max_look_back_period: 0s

View File

@ -1,70 +0,0 @@
services:
prometheus:
image: prom/prometheus:latest
container_name: prometheus-observability
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- "9090:9090"
networks:
- observability
grafana:
image: grafana/grafana:latest
container_name: grafana-observability
ports:
- "3000:3000"
volumes:
- grafana-data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=admin
networks:
- observability
depends_on:
- loki
loki:
image: grafana/loki:2.8.2
container_name: loki-observability
ports:
- "3100:3100"
command: -config.file=/etc/loki/local-config.yaml
volumes:
- ./loki-config.yaml:/etc/loki/local-config.yaml
- ./loki-wal:/wal
- ./loki-chunks:/loki/chunks
- ./loki-index:/loki/index
networks:
- observability
promtail:
image: grafana/promtail:2.8.2
container_name: promtail-observability
volumes:
- ./promtail-config.yaml:/etc/promtail/config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
command:
- -config.file=/etc/promtail/config.yaml
depends_on:
- loki
networks:
- observability
node_exporter:
image: prom/node-exporter:latest
container_name: node-exporter
restart: unless-stopped
pid: "host"
network_mode: "host"
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro
- /:/rootfs:ro
command:
- '--path.procfs=/host/proc'
- '--path.sysfs=/host/sys'
- '--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)'
volumes:
grafana-data:
networks:
observability:
driver: bridge

View File

@ -1,38 +0,0 @@
auth_enabled: false
server:
http_listen_port: 3100
ingester:
wal:
enabled: true
chunk_idle_period: 5m
chunk_retain_period: 30s
chunk_target_size: 1048576
max_transfer_retries: 0
lifecycler:
ring:
kvstore:
store: inmemory
replication_factor: 1
schema_config:
configs:
- from: 2020-10-24
store: boltdb
object_store: filesystem
schema: v11
index:
prefix: index_
period: 168h
storage_config:
boltdb:
directory: /loki/index
filesystem:
directory: /loki/chunks
limits_config:
enforce_metric_name: false
reject_old_samples: true
reject_old_samples_max_age: 168h

View File

@ -1,16 +0,0 @@
global:
scrape_interval: 15s
scrape_configs:
- job_name: 'prometheus'
static_configs:
- targets: ['localhost:9090']
- job_name: 'node_exporter'
static_configs:
- targets: ['192.168.4.4:9100']
- job_name: 'scaphandre'
static_configs:
- targets: ['192.168.4.4:8080']
fallback_scrape_protocol: "PrometheusText1.0.0"

View File

@ -1,26 +0,0 @@
server:
http_listen_port: 9080
grpc_listen_port: 0
positions:
filename: /tmp/positions.yaml
clients:
- url: http://loki:3100/loki/api/v1/push
scrape_configs:
- job_name: docker_logs
static_configs:
- targets:
- localhost
labels:
job: docker
__path__: /var/lib/docker/containers/*/*.log
- job_name: system_logs
static_configs:
- targets:
- localhost
labels:
job: syslog
__path__: /var/log/*.log

View File

@ -1,104 +0,0 @@
#!/bin/bash
set -e
# Variables
APACHE_EXPORTER_VERSION="1.0.10"
PROMTAIL_VERSION="2.9.0"
PROMTAIL_CONFIG="/etc/promtail-config.yaml"
LOKI_URL="http://192.168.56.20:3100/loki/api/v1/push"
echo "=== Installing dependencies ==="
sudo apt update
sudo apt install -y wget unzip apache2
echo "=== Installing and configuring apache_exporter ==="
cd /opt
wget https://github.com/Lusitaniae/apache_exporter/releases/download/v${APACHE_EXPORTER_VERSION}/apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64.tar.gz
tar -xvf apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64.tar.gz
sudo mv apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64/apache_exporter /usr/local/bin/
# Create the systemd service for apache_exporter
sudo tee /etc/systemd/system/apache_exporter.service > /dev/null <<EOL
[Unit]
Description=Prometheus Apache Exporter
After=network.target
[Service]
User=root
ExecStart=/usr/local/bin/apache_exporter --scrape_uri="http://localhost/server-status?auto"
[Install]
WantedBy=multi-user.target
EOL
echo "=== Enabling Apache's status module ==="
sudo a2enmod status
sudo systemctl restart apache2
echo "=== Starting the apache_exporter service ==="
sudo systemctl daemon-reload
sudo systemctl enable apache_exporter
sudo systemctl start apache_exporter
sudo systemctl status apache_exporter --no-pager
echo "=== Installing and configuring Promtail ==="
cd /opt
wget https://github.com/grafana/loki/releases/download/v${PROMTAIL_VERSION}/promtail-linux-amd64.zip
unzip promtail-linux-amd64.zip
sudo mv promtail-linux-amd64 /usr/local/bin/promtail
sudo chmod +x /usr/local/bin/promtail
# Create the Promtail configuration file
sudo tee ${PROMTAIL_CONFIG} > /dev/null <<EOL
server:
http_listen_port: 9080
grpc_listen_port: 0
positions:
filename: /var/log/promtail-positions.yaml
clients:
- url: ${LOKI_URL}
scrape_configs:
- job_name: apache_access
static_configs:
- targets: ['localhost']
labels:
job: apache_access
host: apache_vm
__path__: /var/log/apache2/access.log
- job_name: apache_error
static_configs:
- targets: ['localhost']
labels:
job: apache_error
host: apache_vm
__path__: /var/log/apache2/error.log
EOL
# Create the systemd service for Promtail
sudo tee /etc/systemd/system/promtail.service > /dev/null <<EOL
[Unit]
Description=Promtail service for Apache logs
After=network.target
[Service]
User=root
ExecStart=/usr/local/bin/promtail -config.file ${PROMTAIL_CONFIG}
Restart=always
[Install]
WantedBy=multi-user.target
EOL
echo "=== Starting the Promtail service ==="
sudo systemctl daemon-reload
sudo systemctl enable promtail
sudo systemctl start promtail
sudo systemctl status promtail --no-pager
echo "=== Installation complete ==="

View File

@ -1,124 +0,0 @@
Step 1: Install Apache on the machine to be monitored
To check whether Apache is already installed:
dpkg -l | grep apache2
If the command returns a version number, Apache is already installed. Otherwise:
On Ubuntu/Debian:
sudo apt update
sudo apt install apache2 -y
sudo systemctl enable apache2
sudo systemctl start apache2
sudo systemctl status apache2
Step 2: Install the metrics module for Prometheus
Prometheus cannot read Apache metrics directly, so apache_exporter is used.
2.1 Download apache_exporter
cd /opt
sudo wget https://github.com/Lusitaniae/apache_exporter/releases/download/v1.0.10/apache_exporter-1.0.10.linux-amd64.tar.gz
sudo tar -xvf apache_exporter-1.0.10.linux-amd64.tar.gz
sudo mv apache_exporter-1.0.10.linux-amd64/apache_exporter /usr/local/bin/
2.2 Create a systemd service for apache_exporter
sudo nano /etc/systemd/system/apache_exporter.service
------------->
[Unit]
Description=Prometheus Apache Exporter
After=network.target
[Service]
User=root
ExecStart=/usr/local/bin/apache_exporter --scrape_uri="http://localhost/server-status?auto"
[Install]
WantedBy=multi-user.target
------------->
2.3 Enable Apache's status module
sudo a2enmod status
sudo systemctl restart apache2
2.4 Start apache_exporter
sudo systemctl daemon-reload
sudo systemctl enable apache_exporter
sudo systemctl start apache_exporter
sudo systemctl status apache_exporter
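At this point the exporter should answer on its default port 9117 (the same port Prometheus scrapes for the apache_vmservices job):

curl -s http://localhost:9117/metrics | grep -m 5 '^apache_'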
Step 4: Collect Apache logs with Loki + Promtail
4.1 Install Promtail on the monitored machine
If Promtail is not yet installed, download it from Grafana:
cd /opt
sudo wget https://github.com/grafana/loki/releases/download/v2.9.0/promtail-linux-amd64.zip
sudo unzip promtail-linux-amd64.zip
sudo mv promtail-linux-amd64 /usr/local/bin/promtail
sudo chmod +x /usr/local/bin/promtail
4.2 Create a Promtail configuration file
sudo nano /etc/promtail-config.yaml
------------->
server:
http_listen_port: 9080
grpc_listen_port: 0
positions:
filename: /var/log/promtail-positions.yaml
clients:
- url: http://<IP_VM_SUPERVISION>:3100/loki/api/v1/push
scrape_configs:
- job_name: apache_access
static_configs:
- targets: ['localhost']
labels:
job: apache_access
host: apache_vm
__path__: /var/log/apache2/access.log
- job_name: apache_error
static_configs:
- targets: ['localhost']
labels:
job: apache_error
host: apache_vm
__path__: /var/log/apache2/error.log
------------->
4.3 Create a systemd service for Promtail
sudo nano /etc/systemd/system/promtail.service
------------->
[Unit]
Description=Promtail service for Apache logs
After=network.target
[Service]
User=root
ExecStart=/usr/local/bin/promtail -config.file /etc/promtail-config.yaml
Restart=always
[Install]
WantedBy=multi-user.target
------------->
sudo systemctl daemon-reload
sudo systemctl enable promtail
sudo systemctl start promtail
sudo systemctl status promtail
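Promtail exposes status endpoints on the http_listen_port configured above, which makes a quick health check possible:

curl -s http://localhost:9080/ready      # reports readiness
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:9080/targets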

View File

@ -1,56 +0,0 @@
version: "3.8"
services:
prometheus:
image: prom/prometheus:latest
container_name: prometheus-observability
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- "9090:9090"
networks:
- observability
grafana:
image: grafana/grafana:latest
container_name: grafana-observability
ports:
- "3000:3000"
volumes:
- grafana-data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=admin
networks:
- observability
depends_on:
- loki
loki:
image: grafana/loki:2.8.2
container_name: loki-observability
ports:
- "3100:3100"
command: -config.file=/etc/loki/local-config.yaml
volumes:
- ./loki-config.yaml:/etc/loki/local-config.yaml
- ./loki-wal:/wal
- ./loki-chunks:/loki/chunks
- ./loki-index:/loki/index
networks:
- observability
promtail:
image: grafana/promtail:2.8.2
container_name: promtail-observability
volumes:
- ./promtail-config.yaml:/etc/promtail/config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
- /var/log:/var/log:ro
command:
- -config.file=/etc/promtail/config.yaml
networks:
- observability
volumes:
grafana-data:
networks:
observability:
driver: bridge

View File

@ -1,59 +0,0 @@
auth_enabled: false
server:
http_listen_port: 3100
grpc_listen_port: 9096
common:
path_prefix: /loki
storage:
filesystem:
chunks_directory: /loki/chunks
rules_directory: /loki/rules
replication_factor: 1
ring:
instance_addr: 127.0.0.1
kvstore:
store: inmemory
ingester:
wal:
enabled: true
dir: /wal
flush_on_shutdown: true
chunk_idle_period: 5m
chunk_retain_period: 30s
max_chunk_age: 1h
lifecycler:
ring:
replication_factor: 1
schema_config:
configs:
- from: 2020-10-24
store: boltdb-shipper
object_store: filesystem
schema: v11
index:
prefix: index_
period: 24h
storage_config:
boltdb_shipper:
active_index_directory: /loki/index
cache_location: /loki/index
shared_store: filesystem
filesystem:
directory: /loki/chunks
limits_config:
enforce_metric_name: false
reject_old_samples: true
reject_old_samples_max_age: 168h
ingestion_rate_mb: 10
ingestion_burst_size_mb: 20
compactor:
working_directory: /loki/compactor
shared_store: filesystem
compaction_interval: 10m

View File

@ -1,15 +0,0 @@
global:
scrape_interval: 15s
scrape_configs:
- job_name: 'prometheus'
static_configs:
- targets: ['localhost:9090']
- job_name: 'node_exporter_vmservices'
static_configs:
- targets: ['192.168.56.31:9100']
- job_name: 'apache_vmservices'
static_configs:
- targets: ['192.168.56.31:9117']

View File

@ -1,19 +0,0 @@
server:
http_listen_port: 9080
grpc_listen_port: 0
positions:
filename: /tmp/positions.yaml
clients:
- url: http://loki:3100/loki/api/v1/push
scrape_configs:
- job_name: docker_logs
static_configs:
- targets:
- localhost
labels:
job: docker
__path__: /var/lib/docker/containers/*/*.log