Compare commits

..

28 Commits

Author SHA1 Message Date
59267d413f 28/10 2025-10-28 13:18:11 +01:00
2752cfe5bd 28/10 2025-10-28 13:11:42 +01:00
5e8fd3e2bf 28/10 2025-10-28 13:02:14 +01:00
06914042fa 28/10 2025-10-28 12:59:29 +01:00
69e72fff97 28/10 2025-10-28 12:56:06 +01:00
cd22fdac81 28/10 2025-10-28 12:52:46 +01:00
a310143065 28/10 2025-10-28 12:51:50 +01:00
3b81934792 28/10 2025-10-28 12:47:47 +01:00
a71fc1ee41 28/10 2025-10-28 12:37:08 +01:00
429e2530f7 28/10 2025-10-28 11:29:40 +01:00
4f52cc714b 28/10 2025-10-28 11:28:04 +01:00
9785feb5ef Delete observabilite_applicatif/note.txt 2025-10-28 10:21:58 +00:00
f0bc9b907a mise a jour 2025-10-28 10:21:21 +00:00
1491aef240 28/10 2025-10-28 11:20:23 +01:00
f471120316 28/10 2025-10-28 10:25:23 +01:00
cd8b52548b 28/10 2025-10-28 10:23:47 +01:00
31f2628538 mise a jour 2025-10-24 15:11:15 +00:00
6604b6d123 maj1 2025-10-24 13:02:15 +00:00
890a2fa1a3 maj 2025-10-24 08:33:31 +00:00
4a2a0b0181 tuto_miseàjour 2025-10-24 08:13:52 +00:00
3b6e6b9fce 24/10/25 2025-10-24 05:40:44 +02:00
209a53cbdf 23/10/25 2025-10-23 14:01:20 +02:00
5f461b7f0d 23/10/25 2025-10-23 13:41:56 +02:00
ea5a00a89f 23/10/25 2025-10-23 13:40:24 +02:00
566d3909a4 23/10/25 2025-10-23 13:37:14 +02:00
ec6060cd2c 23/10/25 2025-10-23 13:18:31 +02:00
b01e5c5f4f 23/10/25 2025-10-23 13:02:55 +02:00
afbe59ca68 23/10/25 2025-10-23 10:30:19 +02:00
34 changed files with 1570 additions and 26 deletions

View File

@ -1,7 +0,0 @@
global:
scrape_interval: 15s
scrape_configs:
- job_name: 'prometheus'
static_configs:
- targets: ['localhost:9090']

View File

@ -44,6 +44,7 @@ services:
volumes:
- ./promtail-config.yaml:/etc/promtail/config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
- /var/log:/var/log:ro
command:
- -config.file=/etc/promtail/config.yaml
networks:

View File

@ -0,0 +1,59 @@
# Loki single-node configuration (filesystem storage, boltdb-shipper index).
# NOTE(review): indentation restored — the previous version was flattened and
# therefore not valid YAML nesting for Loki's config schema.
auth_enabled: false

server:
  http_listen_port: 3100
  grpc_listen_port: 9096

common:
  path_prefix: /loki
  storage:
    filesystem:
      chunks_directory: /loki/chunks
      rules_directory: /loki/rules
  replication_factor: 1
  ring:
    instance_addr: 127.0.0.1
    kvstore:
      store: inmemory

ingester:
  wal:
    enabled: true
    dir: /wal
    flush_on_shutdown: true
  chunk_idle_period: 5m
  chunk_retain_period: 30s
  max_chunk_age: 1h
  lifecycler:
    ring:
      # NOTE(review): duplicates common.replication_factor — confirm intentional.
      replication_factor: 1

schema_config:
  configs:
    # Quoted so the date is read as a string, not a YAML timestamp.
    - from: "2020-10-24"
      store: boltdb-shipper
      object_store: filesystem
      schema: v11
      index:
        prefix: index_
        period: 24h

storage_config:
  boltdb_shipper:
    active_index_directory: /loki/index
    # NOTE(review): cache_location is the same dir as the active index — confirm.
    cache_location: /loki/index
    shared_store: filesystem
  filesystem:
    directory: /loki/chunks

limits_config:
  enforce_metric_name: false
  reject_old_samples: true
  reject_old_samples_max_age: 168h  # 7 days
  ingestion_rate_mb: 10
  ingestion_burst_size_mb: 20

compactor:
  working_directory: /loki/compactor
  shared_store: filesystem
  compaction_interval: 10m

View File

@ -0,0 +1 @@
{"UID":"714e0dc1-bca9-44e1-aca6-110f8b49de5c","created_at":"2025-09-29T13:46:47.834317171Z","version":{"version":"2.8.2","revision":"9f809eda7","branch":"HEAD","buildUser":"root@e401cfcb874f","buildDate":"2023-05-03T11:07:54Z","goVersion":"go1.20.4"}}

View File

@ -0,0 +1,15 @@
# Prometheus scrape configuration for the observability VM.
# NOTE(review): indentation restored — the previous version was flattened and
# not valid YAML nesting for Prometheus.
global:
  scrape_interval: 15s  # default interval applied to every job below

scrape_configs:
  # Prometheus scraping its own /metrics endpoint.
  - job_name: 'prometheus'
    static_configs:
      - targets: ['localhost:9090']

  # node_exporter on the services VM — disabled; uncomment to re-enable.
  #- job_name: 'node_exporter_vmservices'
  #  static_configs:
  #    - targets: ['192.168.56.31:9100']

  # apache_exporter running on the services VM.
  - job_name: 'apache_vmservices'
    static_configs:
      - targets: ['192.168.56.32:9117']  # ip vmService

View File

@ -26,7 +26,8 @@ echo "Copie du dossier 'observability' vers /root..."
sudo cp -r /vagrant/observability /root
echo "Déplacement dans le dossier observability..."
cd /root/observability || { echo "Échec : dossier introuvable."; exit 1; }
#sudo -i
cd ~/observability
echo "Modification des permissions des dossiers Loki..."
sudo chown -R 10001:10001 loki-wal loki-chunks loki-index

View File

@ -0,0 +1,111 @@
-- Seed data for the ArchiWeb database (Star Trek themed demo users).
-- Run after schema.sql. Passwords are plain text: demo/test data only.

-- Insert the application roles.
-- NOTE(review): later inserts hard-code role ids 1/2/3 and rely on this
-- insertion order (1 = Administrateur, 2 = Enseignant, 3 = Étudiant) —
-- verify ids if roles are ever reseeded.
INSERT INTO role (nom)
VALUES ('Administrateur'),
('Enseignant'),
('Étudiant');
-- Insert users. Some rows deliberately leave nom or prenom empty
-- (single-named characters such as Spock, Data, Murf).
INSERT INTO utilisateur (login, mot_de_passe, nom, prenom, email)
VALUES ('jlpicar', 'password', 'Picard', 'Jean-Luc', 'jean-luc.picard@starfleet.com'),
('kjaneway', 'password', 'Janeway', 'Kathryn', 'kathryn.janeway@starfleet.com'),
('spock', 'password', '', 'Spock', 'spock@starfleet.com'),
('wriker', 'password', 'Riker', 'William', 'william.riker@starfleet.com'),
('data', 'password', '', 'Data', 'data@starfleet.com'),
('glaforge', 'password', 'LaForge', 'Geordi', 'geordi.laforge@starfleet.com'),
('tuvok', 'password', 'Tuvok', '', 'tuvok@starfleet.com'),
('wcrusher', 'password', 'Crusher', 'Wesley', 'wesley.crusher@starfleet.com'),
('tparis', 'password', 'Paris', 'Tom', 'tom.paris@starfleet.com'),
('hkim', 'password', 'Kim', 'Harry', 'harry.kim@starfleet.com'),
('jsisko', 'password', 'Sisko', 'Jake', 'jake.sisko@starfleet.com'),
('nog', 'password', '', 'Nog', 'nog@starfleet.com'),
('bmariner', 'password', 'Mariner', 'Beckett', 'beckett.mariner@starfleet.com'),
('bboimler', 'password', 'Boimler', 'Brad', 'brad.boimler@starfleet.com'),
('dvtendi', 'password', 'Tendi', 'D''Vana', 'd-vana.tendi@starfleet.com'),
('srutherf', 'password', 'Rutherford', 'Sam', 'sam.rutherford@starfleet.com'),
('drel', 'password', 'R''El', 'Dal', 'dal.r-el@starfleet.com'),
('gwyndala', 'password', '', 'Gwyndala', 'gwyndala@starfleet.com'),
-- NOTE(review): email spells "rok-takh" while the login/name use "tahk" — confirm.
('roktahk', 'password', '', 'Rok-Tahk', 'rok-takh@starfleet.com'),
('zero', 'password', 'Zero', '', 'zero@starfleet.com'),
('jpog', 'password', 'Pog', 'Jankom', 'jankom.pog@starfleet.com'),
('murf', 'password', '', 'Murf', 'murf@starfleet.com');
-- Assign roles to users (1 = Administrateur, 2 = Enseignant, 3 = Étudiant).
INSERT INTO utilisateur_role (utilisateur_id, role_id)
VALUES ((select id from utilisateur where login = 'jlpicar'), 1), -- Picard as Administrator
((select id from utilisateur where login = 'kjaneway'), 1), -- Janeway as Administrator
((select id from utilisateur where login = 'spock'), 2), -- Spock as Teacher
((select id from utilisateur where login = 'wriker'), 2), -- Riker as Teacher
((select id from utilisateur where login = 'data'), 2), -- Data as Teacher
((select id from utilisateur where login = 'glaforge'), 2), -- LaForge as Teacher
((select id from utilisateur where login = 'tuvok'), 2), -- Tuvok as Teacher
((select id from utilisateur where login = 'wcrusher'), 3), -- Crusher as Student
((select id from utilisateur where login = 'tparis'), 3), -- Paris as Student
((select id from utilisateur where login = 'hkim'), 3), -- Kim as Student
((select id from utilisateur where login = 'jsisko'), 3), -- Sisko as Student
((select id from utilisateur where login = 'nog'), 3), -- Nog as Student
((select id from utilisateur where login = 'bmariner'), 3), -- Mariner as Student
((select id from utilisateur where login = 'bboimler'), 3), -- Boimler as Student
((select id from utilisateur where login = 'dvtendi'), 3), -- Tendi as Student
((select id from utilisateur where login = 'srutherf'), 3), -- Rutherford as Student
((select id from utilisateur where login = 'drel'), 3), -- R'El as Student
((select id from utilisateur where login = 'gwyndala'), 3), -- Gwyndala as Student
((select id from utilisateur where login = 'roktahk'), 3), -- Rok-Tahk as Student
((select id from utilisateur where login = 'zero'), 3), -- Zero as Student
((select id from utilisateur where login = 'jpog'), 3), -- Pog as Student
((select id from utilisateur where login = 'murf'), 3);
-- Murf as Student
-- Insert courses.
INSERT INTO cours (nom, date_debut)
VALUES ('Starship Command', '2023-01-01'),
('Warp Theory', '2023-02-01'),
('Federation Ethics', '2023-03-01');
-- Teaching assignments (which teacher teaches which course).
INSERT INTO enseignement (utilisateur_id, cours_id)
VALUES ((select id from utilisateur where login = 'spock'), 1), -- Spock teaches Starship Command
((select id from utilisateur where login = 'wriker'), 1), -- Riker teaches Starship Command
((select id from utilisateur where login = 'data'), 2), -- Data teaches Warp Theory
((select id from utilisateur where login = 'glaforge'), 2), -- LaForge teaches Warp Theory
((select id from utilisateur where login = 'tuvok'), 3);
-- Tuvok teaches Federation Ethics
-- Student enrollments (which student is enrolled in which course).
INSERT INTO inscription (utilisateur_id, cours_id)
VALUES ((select id from utilisateur where login = 'wcrusher'), 1), -- Crusher enrolls in Starship Command
((select id from utilisateur where login = 'wcrusher'), 2), -- Crusher enrolls in Warp Theory
((select id from utilisateur where login = 'tparis'), 1), -- Paris enrolls in Starship Command
((select id from utilisateur where login = 'hkim'), 1), -- Kim enrolls in Starship Command
((select id from utilisateur where login = 'jsisko'), 3), -- Sisko enrolls in Federation Ethics
((select id from utilisateur where login = 'nog'), 1), -- Nog enrolls in Starship Command
((select id from utilisateur where login = 'nog'), 3), -- Nog enrolls in Federation Ethics
((select id from utilisateur where login = 'bboimler'), 1), -- Boimler enrolls in Starship Command
((select id from utilisateur where login = 'dvtendi'), 2), -- Tendi enrolls in Warp Theory
((select id from utilisateur where login = 'srutherf'), 2), -- Rutherford enrolls in Warp Theory
((select id from utilisateur where login = 'drel'), 1), -- R'El enrolls in Starship Command
((select id from utilisateur where login = 'gwyndala'), 1), -- Gwyndala enrolls in Starship Command
((select id from utilisateur where login = 'gwyndala'), 3), -- Gwyndala enrolls in Federation Ethics
((select id from utilisateur where login = 'roktahk'), 2), -- Rok-Tahk enrolls in Warp Theory
((select id from utilisateur where login = 'zero'), 3), -- Zero enrolls in Federation Ethics
((select id from utilisateur where login = 'jpog'), 2);
-- Pog enrolls in Warp Theory
-- Course content entries (three per course).
INSERT INTO contenu_cours (cours_id, titre, description)
VALUES (1, 'Bridge Operations', 'Learn how to manage starship operations.'),
(1, 'Tactical Maneuvers', 'Advanced starship tactics.'),
(1, 'Command Decision Making', 'Develop leadership skills for commanding a starship.'),
(2, 'Warp Core Mechanics', 'Understand the inner workings of warp cores.'),
(2, 'Dilithium Crystals', 'Study the properties of dilithium crystals.'),
(2, 'Warp Field Theory', 'Explore the physics behind warp travel.'),
(3, 'Prime Directive', 'Ethical dilemmas in space exploration.'),
(3, 'Cultural Sensitivity', 'Learn how to interact with alien civilizations.'),
(3, 'Federation Law', 'Understand the legal framework of the Federation.');
-- Homework assignments with start/end dates per course.
INSERT INTO devoir (cours_id, titre, description, date_debut, date_fin)
VALUES (1, 'Bridge Simulation', 'Participate in a simulated bridge operation.', '2023-01-15', '2023-01-30'),
(1, 'Tactical Exercise', 'Plan and execute tactical maneuvers.', '2023-01-20', '2023-02-05'),
(2, 'Warp Core Analysis', 'Analyze the efficiency of a warp core.', '2023-02-10', '2023-02-25'),
(2, 'Crystal Experiment', 'Experiment with dilithium crystals.', '2023-02-15', '2023-03-01'),
(3, 'Prime Directive Case Study', 'Discuss ethical dilemmas.', '2023-03-10', '2023-03-25');

View File

@ -0,0 +1,136 @@
-- ArchiWeb relational schema (PostgreSQL).
-- Tables: users, roles, courses, teaching/enrollment links, course content,
-- homework, homework submissions and grades. All FKs cascade on delete.

-- Drop tables in reverse dependency order so FK constraints never block.
DROP TABLE IF EXISTS note;
DROP TABLE IF EXISTS rendu_devoir;
DROP TABLE IF EXISTS devoir;
DROP TABLE IF EXISTS contenu_cours;
DROP TABLE IF EXISTS inscription;
DROP TABLE IF EXISTS enseignement;
DROP TABLE IF EXISTS cours;
DROP TABLE IF EXISTS utilisateur_role;
DROP TABLE IF EXISTS role;
DROP TABLE IF EXISTS utilisateur;
-- Table utilisateur: an application user account.
-- NOTE(review): login is capped at 8 characters — confirm that is intended.
CREATE TABLE utilisateur
(
id BIGSERIAL NOT NULL,
login VARCHAR(8) NOT NULL,
mot_de_passe VARCHAR(255) NOT NULL,
nom VARCHAR(100),
prenom VARCHAR(100),
email VARCHAR(255),
PRIMARY KEY (id),
CONSTRAINT ux_u_login UNIQUE (login),
CONSTRAINT ux_u_email UNIQUE (email)
);
-- NOTE(review): likely redundant — the ux_u_login UNIQUE constraint already
-- creates an index on login.
CREATE INDEX ix_u_login ON utilisateur (login);
-- Table role: a user role (e.g. administrator, teacher, student).
CREATE TABLE role
(
id BIGSERIAL NOT NULL,
nom VARCHAR(100) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT ux_r_nom UNIQUE (nom)
);
CREATE INDEX ix_r_nom ON role (nom);
-- Table utilisateur_role: many-to-many link between users and roles.
CREATE TABLE utilisateur_role
(
utilisateur_id BIGINT NOT NULL,
role_id BIGINT NOT NULL,
PRIMARY KEY (utilisateur_id, role_id),
CONSTRAINT fk_ur_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_ur_role_id FOREIGN KEY (role_id) REFERENCES role (id) ON DELETE CASCADE
);
CREATE INDEX ix_ur_utilisateur_id ON utilisateur_role (utilisateur_id);
CREATE INDEX ix_ur_role_id ON utilisateur_role (role_id);
-- Table cours: a course with a start date.
CREATE TABLE cours
(
id BIGSERIAL NOT NULL,
nom VARCHAR(100) NOT NULL,
date_debut DATE NOT NULL,
PRIMARY KEY (id)
);
CREATE INDEX ix_c_nom ON cours (nom);
-- Table enseignement: many-to-many link between teachers and courses.
CREATE TABLE enseignement
(
utilisateur_id BIGINT NOT NULL,
cours_id BIGINT NOT NULL,
PRIMARY KEY (utilisateur_id, cours_id),
CONSTRAINT fk_e_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_e_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_e_utilisateur_id ON enseignement (utilisateur_id);
CREATE INDEX ix_e_cours_id ON enseignement (cours_id);
-- Table inscription: student enrollment in a course, timestamped at insert.
CREATE TABLE inscription
(
utilisateur_id BIGINT NOT NULL,
cours_id BIGINT NOT NULL,
date_inscription TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (utilisateur_id, cours_id),
CONSTRAINT fk_i_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_i_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_i_utilisateur_id ON inscription (utilisateur_id);
CREATE INDEX ix_i_cours_id ON inscription (cours_id);
-- Table contenu_cours: a content item (chapter/section) belonging to a course.
CREATE TABLE contenu_cours
(
id BIGSERIAL NOT NULL,
cours_id BIGINT NOT NULL,
titre VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY (id),
CONSTRAINT fk_cc_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_cc_cours_id ON contenu_cours (cours_id);
-- Table devoir: a homework assignment attached to a course, with a time window.
CREATE TABLE devoir
(
id BIGSERIAL NOT NULL,
cours_id BIGINT NOT NULL,
titre VARCHAR(255) NOT NULL,
description TEXT,
date_debut TIMESTAMP NOT NULL,
date_fin TIMESTAMP NOT NULL,
PRIMARY KEY (id),
CONSTRAINT fk_d_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_d_cours_id ON devoir (cours_id);
-- Table rendu_devoir: a user's submission for a homework assignment.
CREATE TABLE rendu_devoir
(
id BIGSERIAL NOT NULL,
devoir_id BIGINT NOT NULL,
utilisateur_id BIGINT NOT NULL,
date_rendu TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
contenu TEXT,
PRIMARY KEY (id),
CONSTRAINT fk_rd_devoir_id FOREIGN KEY (devoir_id) REFERENCES devoir (id) ON DELETE CASCADE,
CONSTRAINT fk_rd_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE
);
CREATE INDEX ix_rd_devoir_id ON rendu_devoir (devoir_id);
CREATE INDEX ix_rd_utilisateur_id ON rendu_devoir (utilisateur_id);
-- Table note: a grade (0 to 20, two decimals) given to a homework submission.
CREATE TABLE note
(
id BIGSERIAL NOT NULL,
rendu_devoir_id BIGINT NOT NULL,
valeur DECIMAL(5, 2) NOT NULL CHECK (valeur >= 0 AND valeur <= 20),
date_attribution TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
CONSTRAINT fk_n_rendu_devoir_id FOREIGN KEY (rendu_devoir_id) REFERENCES rendu_devoir (id) ON DELETE CASCADE
);
CREATE INDEX ix_n_rendu_devoir_id ON note (rendu_devoir_id);

View File

@ -0,0 +1,86 @@
#!/bin/bash
# Installation and deployment script for the ArchiWeb app VM:
# Java 17 + Maven, Tomcat 10 (systemd), WAR deployment, PostgreSQL 18 + schema import.
# Run as root or with sudo.
set -e           # abort on the first failing command
set -o pipefail  # a failure anywhere in a pipeline fails the whole pipeline

echo "=== Mise à jour du système ==="
sudo apt update && sudo apt upgrade -y

echo "=== Installation Java 17 et Maven ==="
sudo apt install -y openjdk-17-jdk maven wget gnupg lsb-release

echo "=== Vérification des versions ==="
java -version
mvn -version

echo "=== Installation de Tomcat 10 ==="
cd /opt
sudo wget https://dlcdn.apache.org/tomcat/tomcat-10/v10.1.47/bin/apache-tomcat-10.1.47.tar.gz
sudo tar -xvzf apache-tomcat-10.1.47.tar.gz
sudo mv apache-tomcat-10.1.47 tomcat
sudo chown -R vagrant:vagrant /opt/tomcat

echo "=== Configuration des variables d'environnement ==="
if ! grep -q "CATALINA_HOME" ~/.bashrc; then
    echo "export CATALINA_HOME=/opt/tomcat" >> ~/.bashrc
    echo 'export PATH=$PATH:$CATALINA_HOME/bin' >> ~/.bashrc
fi
# Fix: .bashrc commonly returns non-zero / bails out in non-interactive shells;
# do not let that abort the script under 'set -e'.
source ~/.bashrc || true

echo "=== Création du service systemd pour Tomcat ==="
cat <<EOF | sudo tee /etc/systemd/system/tomcat.service
[Unit]
Description=Apache Tomcat 10.1.47
After=network.target

[Service]
Type=forking
Environment=JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64
Environment=CATALINA_HOME=/opt/tomcat
ExecStart=/opt/tomcat/bin/startup.sh
ExecStop=/opt/tomcat/bin/shutdown.sh
User=vagrant
Group=vagrant
RestartSec=10
Restart=always

[Install]
WantedBy=multi-user.target
EOF
sudo systemctl daemon-reload
sudo systemctl enable tomcat
sudo systemctl start tomcat
sudo systemctl status tomcat --no-pager

echo "=== Déploiement du WAR ArchiWeb ==="
# NOTE(review): the WAR keeps its versioned name, so the context path becomes
# /archiweb-api-1.0.0 — the manual notes rename it to api.war; confirm which is wanted.
sudo cp /vagrant/archiweb-api-1.0.0.war /opt/tomcat/webapps/

echo "=== Installation PostgreSQL 18 ==="
# NOTE(review): apt-key is deprecated on recent Debian/Ubuntu; switch to a
# signed-by keyring if this step starts failing on newer releases.
echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | sudo tee /etc/apt/sources.list.d/pgdg.list
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt update
sudo apt install -y postgresql-18 postgresql-client-18 postgresql-contrib

echo "=== Configuration de PostgreSQL ==="
# Drop the role as well so the script is re-runnable (the role owns nothing
# once the database is gone).
sudo -u postgres psql <<EOF
DROP DATABASE IF EXISTS archiweb_db;
DROP USER IF EXISTS archiweb_user;
CREATE DATABASE archiweb_db;
CREATE USER archiweb_user WITH ENCRYPTED PASSWORD 'archiweb_pass';
GRANT ALL PRIVILEGES ON DATABASE archiweb_db TO archiweb_user;
\q
EOF

# Switch local (socket) authentication from peer to md5 so password logins work.
PG_HBA="/etc/postgresql/18/main/pg_hba.conf"
sudo sed -i "s/^local\s\+all\s\+all\s\+peer/local all all md5/" $PG_HBA
sudo systemctl restart postgresql
sudo systemctl status postgresql --no-pager

echo "=== Import des schémas et données ==="
# Fix: 'archiweb_user' is only a database role, not an OS account, so
# 'sudo -u archiweb_user' would fail. Connect over TCP as the DB role and
# supply the password through PGPASSWORD (the '-W <<<' trick was fragile).
export PGPASSWORD='archiweb_pass'
psql -h localhost -U archiweb_user -d archiweb_db -f /vagrant/database/schema.sql
psql -h localhost -U archiweb_user -d archiweb_db -f /vagrant/database/data.sql
unset PGPASSWORD

echo "=== Installation et déploiement terminé ==="

View File

@ -0,0 +1,111 @@
#!/bin/bash
# Install and configure Promtail on the application VM to ship Tomcat logs
# (catalina, localhost, access, catalina.out) to the Loki server.
set -e

# --- Variables ---
PROMTAIL_VERSION="v3.1.1"
PROMTAIL_DIR="/opt/promtail"
CONFIG_DIR="/etc/promtail"
TOMCAT_LOG_DIR="/opt/tomcat/logs"
LOKI_URL="http://192.168.56.20:3100/loki/api/v1/push"
PROMTAIL_USER="promtail"
PROMTAIL_GROUP="promtail"
TOMCAT_GROUP="tomcatlogs"

# 1. Install Promtail binary under /opt/promtail
echo "=== Installation de Promtail ==="
mkdir -p $PROMTAIL_DIR
cd $PROMTAIL_DIR
wget -q https://github.com/grafana/loki/releases/download/$PROMTAIL_VERSION/promtail-linux-amd64.zip
apt-get update && apt-get install -y unzip
unzip -o promtail-linux-amd64.zip
mv promtail-linux-amd64 promtail
chmod +x promtail

# 2. Create a dedicated no-login system user for the service
id -u $PROMTAIL_USER &>/dev/null || useradd -r -s /bin/false $PROMTAIL_USER
chown -R $PROMTAIL_USER:$PROMTAIL_GROUP $PROMTAIL_DIR || true

# 3. Promtail configuration
# Fix: the YAML below is indentation-sensitive; the previous version was
# written flat and was not valid Promtail configuration.
echo "=== Configuration Promtail ==="
mkdir -p $CONFIG_DIR
cat > $CONFIG_DIR/config-promtail.yaml <<EOF
server:
  http_listen_port: 9080
  grpc_listen_port: 0

# NOTE(review): /tmp is wiped on reboot, so read positions are lost and logs
# get re-sent; consider a persistent path such as $PROMTAIL_DIR/positions.yaml.
positions:
  filename: /tmp/positions.yaml

clients:
  - url: $LOKI_URL

scrape_configs:
  - job_name: tomcat
    static_configs:
      - targets:
          - localhost
        labels:
          job: tomcat
          host: applicatif-vm
          type: catalina
          __path__: $TOMCAT_LOG_DIR/catalina*.log
      - targets:
          - localhost
        labels:
          job: tomcat
          host: applicatif-vm
          type: localhost
          __path__: $TOMCAT_LOG_DIR/localhost*.log
      - targets:
          - localhost
        labels:
          job: tomcat
          host: applicatif-vm
          type: access
          __path__: $TOMCAT_LOG_DIR/localhost_access_log*.txt
      - targets:
          - localhost
        labels:
          job: tomcat
          host: applicatif-vm
          type: out
          __path__: $TOMCAT_LOG_DIR/catalina.out
EOF

# 4. systemd unit running Promtail as the dedicated user
echo "=== Création du service systemd ==="
cat > /etc/systemd/system/promtail.service <<EOF
[Unit]
Description=Promtail service
After=network.target

[Service]
User=$PROMTAIL_USER
ExecStart=$PROMTAIL_DIR/promtail -config.file=$CONFIG_DIR/config-promtail.yaml
Restart=always

[Install]
WantedBy=multi-user.target
EOF

# 5. Grant promtail read access to Tomcat logs via a shared group
echo "=== Configuration des permissions Tomcat logs ==="
groupadd -f $TOMCAT_GROUP
usermod -aG $TOMCAT_GROUP $PROMTAIL_USER || true
usermod -aG $TOMCAT_GROUP vagrant || true
chown -R vagrant:$TOMCAT_GROUP $TOMCAT_LOG_DIR
chmod -R 750 $TOMCAT_LOG_DIR
# Fix: the glob fails under 'set -e' when no *.log file exists yet (fresh
# Tomcat install); do not let that abort the script.
chmod -R g+r $TOMCAT_LOG_DIR/*.log 2>/dev/null || true

# 6. Enable and start the Promtail service
echo "=== Activation du service Promtail ==="
systemctl daemon-reload
systemctl enable --now promtail
systemctl restart promtail
systemctl status promtail --no-pager

# 7. Sanity check on the log directory
echo "=== Vérification des logs ==="
ls -ld $TOMCAT_LOG_DIR
ls -l $TOMCAT_LOG_DIR | head

View File

@ -0,0 +1,104 @@
#!/bin/bash
# Install apache_exporter (Prometheus metrics via mod_status) and Promtail
# (Apache access/error logs to Loki) on the Apache VM.
set -e

# --- Variables ---
APACHE_EXPORTER_VERSION="1.0.10"
PROMTAIL_VERSION="2.9.0"
PROMTAIL_CONFIG="/etc/promtail-config.yaml"
LOKI_URL="http://192.168.56.20:3100/loki/api/v1/push"

echo "=== Installation des dépendances ==="
sudo apt update
sudo apt install -y wget unzip apache2

echo "=== Installation et configuration de apache_exporter ==="
cd /opt
wget https://github.com/Lusitaniae/apache_exporter/releases/download/v${APACHE_EXPORTER_VERSION}/apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64.tar.gz
tar -xvf apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64.tar.gz
sudo mv apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64/apache_exporter /usr/local/bin/

# systemd unit for apache_exporter; Restart=always added for consistency with
# the Promtail unit below.
sudo tee /etc/systemd/system/apache_exporter.service > /dev/null <<EOL
[Unit]
Description=Prometheus Apache Exporter
After=network.target

[Service]
User=root
ExecStart=/usr/local/bin/apache_exporter --scrape_uri="http://localhost/server-status?auto"
Restart=always

[Install]
WantedBy=multi-user.target
EOL

echo "=== Activation du module status d'Apache ==="
sudo a2enmod status
sudo systemctl restart apache2

echo "=== Démarrage du service apache_exporter ==="
sudo systemctl daemon-reload
sudo systemctl enable apache_exporter
sudo systemctl start apache_exporter
sudo systemctl status apache_exporter --no-pager

echo "=== Installation et configuration de Promtail ==="
cd /opt
wget https://github.com/grafana/loki/releases/download/v${PROMTAIL_VERSION}/promtail-linux-amd64.zip
unzip promtail-linux-amd64.zip
sudo mv promtail-linux-amd64 /usr/local/bin/promtail
sudo chmod +x /usr/local/bin/promtail

# Promtail configuration.
# Fix: the YAML below is indentation-sensitive; the previous version was
# written flat and was not valid Promtail configuration.
sudo tee ${PROMTAIL_CONFIG} > /dev/null <<EOL
server:
  http_listen_port: 9080
  grpc_listen_port: 0

positions:
  filename: /var/log/promtail-positions.yaml

clients:
  - url: ${LOKI_URL}

scrape_configs:
  - job_name: apache_access
    static_configs:
      - targets: ['localhost']
        labels:
          job: apache_access
          host: apache_vm
          __path__: /var/log/apache2/access.log

  - job_name: apache_error
    static_configs:
      - targets: ['localhost']
        labels:
          job: apache_error
          host: apache_vm
          __path__: /var/log/apache2/error.log
EOL

# systemd unit for Promtail
sudo tee /etc/systemd/system/promtail.service > /dev/null <<EOL
[Unit]
Description=Promtail service for Apache logs
After=network.target

[Service]
User=root
ExecStart=/usr/local/bin/promtail -config.file ${PROMTAIL_CONFIG}
Restart=always

[Install]
WantedBy=multi-user.target
EOL

echo "=== Démarrage du service Promtail ==="
sudo systemctl daemon-reload
sudo systemctl enable promtail
sudo systemctl start promtail
sudo systemctl status promtail --no-pager

echo "=== Installation terminée ==="

Binary file not shown.

View File

@ -0,0 +1,113 @@
1. Préparation de la VM Applicatif (192.168.56.30)
1.1 Mise à jour du système
sudo apt update && sudo apt upgrade -y
1.2 Installation de Java et Maven
sudo apt install -y openjdk-17-jdk maven
java -version
mvn -version
1.3 Installation de Tomcat
cd /opt
sudo wget https://dlcdn.apache.org/tomcat/tomcat-10/v10.1.47/bin/apache-tomcat-10.1.47.tar.gz
sudo tar -xvzf apache-tomcat-10.1.47.tar.gz
sudo mv apache-tomcat-10.1.47 tomcat
sudo chown -R vagrant:vagrant /opt/tomcat
nano ~/.bashrc
export CATALINA_HOME=/opt/tomcat
export PATH=$PATH:$CATALINA_HOME/bin
source ~/.bashrc
sudo nano /etc/systemd/system/tomcat.service
------------------
[Unit]
Description=Apache Tomcat 10.1.47
After=network.target
[Service]
Type=forking
Environment=JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64
Environment=CATALINA_HOME=/opt/tomcat
ExecStart=/opt/tomcat/bin/startup.sh
ExecStop=/opt/tomcat/bin/shutdown.sh
User=vagrant
Group=vagrant
RestartSec=10
Restart=always
[Install]
WantedBy=multi-user.target
----------------------------------------------
sudo systemctl daemon-reload
sudo systemctl enable tomcat
sudo systemctl start tomcat
sudo systemctl status tomcat
1.4 Déploiement de l'application Java
Copie du fichier .war depuis le PC hôte: # debrouille toi
scp target/archiweb-api-1.0.0.war vagrant@192.168.56.31:/tmp/
sudo cp /tmp/archiweb-api-1.0.0.war /opt/tomcat/webapps/
sudo mv /opt/tomcat/webapps/archiweb-api-1.0.0.war /opt/tomcat/webapps/api.war
2. Base de données PostgreSQL (hébergée sur la vm)
2.1
sudo apt update && sudo apt upgrade -y
sudo apt install postgresql postgresql-contrib -y
sudo systemctl status postgresql   (facultatif)
Vérifier postgresql.conf afin que # listen_addresses = 'localhost'
La commande pour le faire :sudo nano /etc/postgresql/15/main/postgresql.conf
2.2 Création de la base
sudo apt update
sudo apt upgrade -y
sudo apt install -y wget gnupg lsb-release
# Ajouter le dépôt officiel PostgreSQL
echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | sudo tee /etc/apt/sources.list.d/pgdg.list
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt update
# Installer le serveur PostgreSQL
sudo apt install -y postgresql-18
sudo -u postgres psql <<EOF
CREATE DATABASE archiweb_db;
CREATE USER archiweb_user WITH ENCRYPTED PASSWORD 'archiweb_pass';
GRANT ALL PRIVILEGES ON DATABASE archiweb_db TO archiweb_user;
\q
EOF
sudo nano /etc/postgresql/18/main/pg_hba.conf
local all all md5
sudo systemctl restart postgresql
sudo systemctl status postgresql
sudo -u postgres psql
DROP DATABASE IF EXISTS archiweb_db;
CREATE DATABASE archiweb_db OWNER archiweb_user;
\q
psql -U archiweb_user -d archiweb_db -f /vagrant/database/schema.sql
psql -U archiweb_user -d archiweb_db -f /vagrant/database/data.sql
Mdp:(archiweb_pass)
http://192.168.56.31:8080/api/utilisateurs

View File

@ -0,0 +1,98 @@
sudo mkdir -p /opt/promtail
cd /opt/promtail
sudo wget https://github.com/grafana/loki/releases/download/v3.1.1/promtail-linux-amd64.zip
sudo apt install -y unzip # si nécessaire
sudo unzip promtail-linux-amd64.zip
sudo mv promtail-linux-amd64 promtail
sudo chmod +x promtail
sudo useradd -r -s /bin/false promtail || true
sudo chown -R promtail:promtail /opt/promtail
root@vmApplicatif:~# cd /etc/
root@vmApplicatif:/etc# mkdir promtail
cd promtail
root@vmApplicatif:/etc/promtail# nano config-promtail.yaml
-------------
server:
http_listen_port: 9080
grpc_listen_port: 0
positions:
filename: /tmp/positions.yaml
clients:
- url: http://192.168.56.20:3100/loki/api/v1/push
scrape_configs:
- job_name: tomcat
static_configs:
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: catalina
__path__: /opt/tomcat/logs/catalina*.log
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: localhost
__path__: /opt/tomcat/logs/localhost*.log
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: access
__path__: /opt/tomcat/logs/localhost_access_log*.txt
- targets:
- localhost
labels:
job: tomcat
host: applicatif-vm
type: out
__path__: /opt/tomcat/logs/catalina.out
---------------------------
nano /etc/systemd/system/promtail.service
[Unit]
Description=Promtail service
After=network.target
[Service]
User=promtail
ExecStart=/opt/promtail/promtail -config.file=/etc/promtail/config-promtail.yaml
Restart=always
[Install]
WantedBy=multi-user.target
--------------------------
sudo systemctl daemon-reload
sudo systemctl enable --now promtail
sudo systemctl start promtail
sudo systemctl status promtail
sudo ls -ld /opt/tomcat/logs
sudo ls -l /opt/tomcat/logs | head
# Donner la propriété complète à tomcat
sudo chown -R tomcat:tomcat /opt/tomcat/logs
# Donner les droits d'accès comme dans ton exemple
sudo chmod 750 /opt/tomcat/logs
# Donner des droits de lecture/écriture classiques aux fichiers de logs
sudo chmod 640 /opt/tomcat/logs/*

View File

@ -0,0 +1,111 @@
-- Seed data for the course-management schema: roles, users, role
-- assignments, courses, teaching assignments, enrolments, course
-- content and homework. Numeric role/course ids used below rely on
-- insertion order: role 1 = Administrateur, 2 = Enseignant, 3 = Étudiant;
-- cours 1 = Starship Command, 2 = Warp Theory, 3 = Federation Ethics.
-- Add roles
INSERT INTO role (nom)
VALUES ('Administrateur'),
('Enseignant'),
('Étudiant');
-- Add users (mononymous characters leave nom or prenom empty)
INSERT INTO utilisateur (login, mot_de_passe, nom, prenom, email)
VALUES ('jlpicar', 'password', 'Picard', 'Jean-Luc', 'jean-luc.picard@starfleet.com'),
('kjaneway', 'password', 'Janeway', 'Kathryn', 'kathryn.janeway@starfleet.com'),
('spock', 'password', '', 'Spock', 'spock@starfleet.com'),
('wriker', 'password', 'Riker', 'William', 'william.riker@starfleet.com'),
('data', 'password', '', 'Data', 'data@starfleet.com'),
('glaforge', 'password', 'LaForge', 'Geordi', 'geordi.laforge@starfleet.com'),
('tuvok', 'password', 'Tuvok', '', 'tuvok@starfleet.com'),
('wcrusher', 'password', 'Crusher', 'Wesley', 'wesley.crusher@starfleet.com'),
('tparis', 'password', 'Paris', 'Tom', 'tom.paris@starfleet.com'),
('hkim', 'password', 'Kim', 'Harry', 'harry.kim@starfleet.com'),
('jsisko', 'password', 'Sisko', 'Jake', 'jake.sisko@starfleet.com'),
('nog', 'password', '', 'Nog', 'nog@starfleet.com'),
('bmariner', 'password', 'Mariner', 'Beckett', 'beckett.mariner@starfleet.com'),
('bboimler', 'password', 'Boimler', 'Brad', 'brad.boimler@starfleet.com'),
('dvtendi', 'password', 'Tendi', 'D''Vana', 'd-vana.tendi@starfleet.com'),
('srutherf', 'password', 'Rutherford', 'Sam', 'sam.rutherford@starfleet.com'),
('drel', 'password', 'R''El', 'Dal', 'dal.r-el@starfleet.com'),
('gwyndala', 'password', '', 'Gwyndala', 'gwyndala@starfleet.com'),
('roktahk', 'password', '', 'Rok-Tahk', 'rok-takh@starfleet.com'), -- NOTE(review): email says 'rok-takh' but login is 'roktahk' — possible transposition; confirm
('zero', 'password', 'Zero', '', 'zero@starfleet.com'),
('jpog', 'password', 'Pog', 'Jankom', 'jankom.pog@starfleet.com'),
('murf', 'password', '', 'Murf', 'murf@starfleet.com');
-- Assign roles to users
INSERT INTO utilisateur_role (utilisateur_id, role_id)
VALUES ((select id from utilisateur where login = 'jlpicar'), 1), -- Picard as Administrator
((select id from utilisateur where login = 'kjaneway'), 1), -- Janeway as Administrator
((select id from utilisateur where login = 'spock'), 2), -- Spock as Teacher
((select id from utilisateur where login = 'wriker'), 2), -- Riker as Teacher
((select id from utilisateur where login = 'data'), 2), -- Data as Teacher
((select id from utilisateur where login = 'glaforge'), 2), -- LaForge as Teacher
((select id from utilisateur where login = 'tuvok'), 2), -- Tuvok as Teacher
((select id from utilisateur where login = 'wcrusher'), 3), -- Crusher as Student
((select id from utilisateur where login = 'tparis'), 3), -- Paris as Student
((select id from utilisateur where login = 'hkim'), 3), -- Kim as Student
((select id from utilisateur where login = 'jsisko'), 3), -- Sisko as Student
((select id from utilisateur where login = 'nog'), 3), -- Nog as Student
((select id from utilisateur where login = 'bmariner'), 3), -- Mariner as Student
((select id from utilisateur where login = 'bboimler'), 3), -- Boimler as Student
((select id from utilisateur where login = 'dvtendi'), 3), -- Tendi as Student
((select id from utilisateur where login = 'srutherf'), 3), -- Rutherford as Student
((select id from utilisateur where login = 'drel'), 3), -- R'El as Student
((select id from utilisateur where login = 'gwyndala'), 3), -- Gwyndala as Student
((select id from utilisateur where login = 'roktahk'), 3), -- Rok-Tahk as Student
((select id from utilisateur where login = 'zero'), 3), -- Zero as Student
((select id from utilisateur where login = 'jpog'), 3), -- Pog as Student
((select id from utilisateur where login = 'murf'), 3);
-- Murf as Student
-- Add courses
INSERT INTO cours (nom, date_debut)
VALUES ('Starship Command', '2023-01-01'),
('Warp Theory', '2023-02-01'),
('Federation Ethics', '2023-03-01');
-- Add teaching assignments
INSERT INTO enseignement (utilisateur_id, cours_id)
VALUES ((select id from utilisateur where login = 'spock'), 1), -- Spock teaches Starship Command
((select id from utilisateur where login = 'wriker'), 1), -- Riker teaches Starship Command
((select id from utilisateur where login = 'data'), 2), -- Data teaches Warp Theory
((select id from utilisateur where login = 'glaforge'), 2), -- LaForge teaches Warp Theory
((select id from utilisateur where login = 'tuvok'), 3);
-- Tuvok teaches Federation Ethics
-- Add enrolments
INSERT INTO inscription (utilisateur_id, cours_id)
VALUES ((select id from utilisateur where login = 'wcrusher'), 1), -- Crusher enrolls in Starship Command
((select id from utilisateur where login = 'wcrusher'), 2), -- Crusher enrolls in Warp Theory
((select id from utilisateur where login = 'tparis'), 1), -- Paris enrolls in Starship Command
((select id from utilisateur where login = 'hkim'), 1), -- Kim enrolls in Starship Command
((select id from utilisateur where login = 'jsisko'), 3), -- Sisko enrolls in Federation Ethics
((select id from utilisateur where login = 'nog'), 1), -- Nog enrolls in Starship Command
((select id from utilisateur where login = 'nog'), 3), -- Nog enrolls in Federation Ethics
((select id from utilisateur where login = 'bboimler'), 1), -- Boimler enrolls in Starship Command
((select id from utilisateur where login = 'dvtendi'), 2), -- Tendi enrolls in Warp Theory
((select id from utilisateur where login = 'srutherf'), 2), -- Rutherford enrolls in Warp Theory
((select id from utilisateur where login = 'drel'), 1), -- R'El enrolls in Starship Command
((select id from utilisateur where login = 'gwyndala'), 1), -- Gwyndala enrolls in Starship Command
((select id from utilisateur where login = 'gwyndala'), 3), -- Gwyndala enrolls in Federation Ethics
((select id from utilisateur where login = 'roktahk'), 2), -- Rok-Tahk enrolls in Warp Theory
((select id from utilisateur where login = 'zero'), 3), -- Zero enrolls in Federation Ethics
((select id from utilisateur where login = 'jpog'), 2);
-- Pog enrolls in Warp Theory
-- Add course content
INSERT INTO contenu_cours (cours_id, titre, description)
VALUES (1, 'Bridge Operations', 'Learn how to manage starship operations.'),
(1, 'Tactical Maneuvers', 'Advanced starship tactics.'),
(1, 'Command Decision Making', 'Develop leadership skills for commanding a starship.'),
(2, 'Warp Core Mechanics', 'Understand the inner workings of warp cores.'),
(2, 'Dilithium Crystals', 'Study the properties of dilithium crystals.'),
(2, 'Warp Field Theory', 'Explore the physics behind warp travel.'),
(3, 'Prime Directive', 'Ethical dilemmas in space exploration.'),
(3, 'Cultural Sensitivity', 'Learn how to interact with alien civilizations.'),
(3, 'Federation Law', 'Understand the legal framework of the Federation.');
-- Add homework assignments
INSERT INTO devoir (cours_id, titre, description, date_debut, date_fin)
VALUES (1, 'Bridge Simulation', 'Participate in a simulated bridge operation.', '2023-01-15', '2023-01-30'),
(1, 'Tactical Exercise', 'Plan and execute tactical maneuvers.', '2023-01-20', '2023-02-05'),
(2, 'Warp Core Analysis', 'Analyze the efficiency of a warp core.', '2023-02-10', '2023-02-25'),
(2, 'Crystal Experiment', 'Experiment with dilithium crystals.', '2023-02-15', '2023-03-01'),
(3, 'Prime Directive Case Study', 'Discuss ethical dilemmas.', '2023-03-10', '2023-03-25');

View File

@ -0,0 +1,136 @@
-- Schema for the course-management application (PostgreSQL dialect — uses
-- BIGSERIAL). Tables are dropped children-first so the script is re-runnable;
-- every foreign key cascades on delete.
-- Drop tables if they exist
DROP TABLE IF EXISTS note;
DROP TABLE IF EXISTS rendu_devoir;
DROP TABLE IF EXISTS devoir;
DROP TABLE IF EXISTS contenu_cours;
DROP TABLE IF EXISTS inscription;
DROP TABLE IF EXISTS enseignement;
DROP TABLE IF EXISTS cours;
DROP TABLE IF EXISTS utilisateur_role;
DROP TABLE IF EXISTS role;
DROP TABLE IF EXISTS utilisateur;
-- Table utilisateur: an application user (login is capped at 8 characters)
CREATE TABLE utilisateur
(
id BIGSERIAL NOT NULL,
login VARCHAR(8) NOT NULL,
mot_de_passe VARCHAR(255) NOT NULL,
nom VARCHAR(100),
prenom VARCHAR(100),
email VARCHAR(255),
PRIMARY KEY (id),
CONSTRAINT ux_u_login UNIQUE (login),
CONSTRAINT ux_u_email UNIQUE (email)
);
CREATE INDEX ix_u_login ON utilisateur (login);
-- Table role: a user role
CREATE TABLE role
(
id BIGSERIAL NOT NULL,
nom VARCHAR(100) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT ux_r_nom UNIQUE (nom)
);
CREATE INDEX ix_r_nom ON role (nom);
-- Table utilisateur_role: many-to-many association between users and roles
CREATE TABLE utilisateur_role
(
utilisateur_id BIGINT NOT NULL,
role_id BIGINT NOT NULL,
PRIMARY KEY (utilisateur_id, role_id),
CONSTRAINT fk_ur_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_ur_role_id FOREIGN KEY (role_id) REFERENCES role (id) ON DELETE CASCADE
);
CREATE INDEX ix_ur_utilisateur_id ON utilisateur_role (utilisateur_id);
CREATE INDEX ix_ur_role_id ON utilisateur_role (role_id);
-- Table cours: a course
CREATE TABLE cours
(
id BIGSERIAL NOT NULL,
nom VARCHAR(100) NOT NULL,
date_debut DATE NOT NULL,
PRIMARY KEY (id)
);
CREATE INDEX ix_c_nom ON cours (nom);
-- Table enseignement: association between teachers and courses
CREATE TABLE enseignement
(
utilisateur_id BIGINT NOT NULL,
cours_id BIGINT NOT NULL,
PRIMARY KEY (utilisateur_id, cours_id),
CONSTRAINT fk_e_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_e_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_e_utilisateur_id ON enseignement (utilisateur_id);
CREATE INDEX ix_e_cours_id ON enseignement (cours_id);
-- Table inscription: association between students and courses
CREATE TABLE inscription
(
utilisateur_id BIGINT NOT NULL,
cours_id BIGINT NOT NULL,
date_inscription TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (utilisateur_id, cours_id),
CONSTRAINT fk_i_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE,
CONSTRAINT fk_i_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_i_utilisateur_id ON inscription (utilisateur_id);
CREATE INDEX ix_i_cours_id ON inscription (cours_id);
-- Table contenu_cours: content item belonging to a course
CREATE TABLE contenu_cours
(
id BIGSERIAL NOT NULL,
cours_id BIGINT NOT NULL,
titre VARCHAR(255) NOT NULL,
description TEXT,
PRIMARY KEY (id),
CONSTRAINT fk_cc_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_cc_cours_id ON contenu_cours (cours_id);
-- Table devoir: a homework assignment attached to a course
CREATE TABLE devoir
(
id BIGSERIAL NOT NULL,
cours_id BIGINT NOT NULL,
titre VARCHAR(255) NOT NULL,
description TEXT,
date_debut TIMESTAMP NOT NULL,
date_fin TIMESTAMP NOT NULL,
PRIMARY KEY (id),
CONSTRAINT fk_d_cours_id FOREIGN KEY (cours_id) REFERENCES cours (id) ON DELETE CASCADE
);
CREATE INDEX ix_d_cours_id ON devoir (cours_id);
-- Table rendu_devoir: a user's submission for an assignment
CREATE TABLE rendu_devoir
(
id BIGSERIAL NOT NULL,
devoir_id BIGINT NOT NULL,
utilisateur_id BIGINT NOT NULL,
date_rendu TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
contenu TEXT,
PRIMARY KEY (id),
CONSTRAINT fk_rd_devoir_id FOREIGN KEY (devoir_id) REFERENCES devoir (id) ON DELETE CASCADE,
CONSTRAINT fk_rd_utilisateur_id FOREIGN KEY (utilisateur_id) REFERENCES utilisateur (id) ON DELETE CASCADE
);
CREATE INDEX ix_rd_devoir_id ON rendu_devoir (devoir_id);
CREATE INDEX ix_rd_utilisateur_id ON rendu_devoir (utilisateur_id);
-- Table note: a grade (0-20, two decimals) attached to a submission
CREATE TABLE note
(
id BIGSERIAL NOT NULL,
rendu_devoir_id BIGINT NOT NULL,
valeur DECIMAL(5, 2) NOT NULL CHECK (valeur >= 0 AND valeur <= 20),
date_attribution TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
CONSTRAINT fk_n_rendu_devoir_id FOREIGN KEY (rendu_devoir_id) REFERENCES rendu_devoir (id) ON DELETE CASCADE
);
CREATE INDEX ix_n_rendu_devoir_id ON note (rendu_devoir_id);

View File

@ -5,18 +5,19 @@ server:
ingester:
lifecycler:
address: 127.0.0.1
ring:
kvstore:
store: inmemory
replication_factor: 1
chunk_idle_period: 5m
chunk_retain_period: 30s
chunk_target_size: 1048576
max_transfer_retries: 0
schema_config:
configs:
- from: 2020-10-24
store: boltdb-shipper
store: boltdb
object_store: filesystem
schema: v11
index:
@ -24,21 +25,14 @@ schema_config:
period: 24h
storage_config:
boltdb_shipper:
active_index_directory: /loki/index
cache_location: /loki/cache
shared_store: filesystem
boltdb:
directory: /loki/index
filesystem:
directory: /loki/chunks
limits_config:
enforce_metric_name: false
reject_old_samples: true
reject_old_samples_max_age: 168h
chunk_store_config:
max_look_back_period: 0s
table_manager:
retention_deletes_enabled: false
retention_period: 0s

View File

@ -0,0 +1,70 @@
# Observability stack: Prometheus (metrics), Grafana (dashboards),
# Loki (log store), Promtail (log shipper), node_exporter (host metrics).
services:
  prometheus:
    image: prom/prometheus:latest
    container_name: prometheus-observability
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
    ports:
      - "9090:9090"
    networks:
      - observability

  grafana:
    image: grafana/grafana:latest
    container_name: grafana-observability
    ports:
      - "3000:3000"
    volumes:
      - grafana-data:/var/lib/grafana
    environment:
      - GF_SECURITY_ADMIN_USER=admin
      - GF_SECURITY_ADMIN_PASSWORD=admin
    networks:
      - observability
    depends_on:
      - loki

  loki:
    image: grafana/loki:2.8.2
    container_name: loki-observability
    ports:
      - "3100:3100"
    command: -config.file=/etc/loki/local-config.yaml
    volumes:
      - ./loki-config.yaml:/etc/loki/local-config.yaml
      - ./loki-wal:/wal
      - ./loki-chunks:/loki/chunks
      - ./loki-index:/loki/index
    networks:
      - observability

  promtail:
    image: grafana/promtail:2.8.2
    container_name: promtail-observability
    volumes:
      - ./promtail-config.yaml:/etc/promtail/config.yaml
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
      # The promtail config also tails /var/log/*.log (system_logs job);
      # without this read-only mount that job finds nothing.
      - /var/log:/var/log:ro
    command:
      - -config.file=/etc/promtail/config.yaml
    depends_on:
      - loki
    networks:
      - observability

  node_exporter:
    image: prom/node-exporter:latest
    container_name: node-exporter
    restart: unless-stopped
    pid: "host"
    network_mode: "host"
    volumes:
      - /proc:/host/proc:ro
      - /sys:/host/sys:ro
      - /:/rootfs:ro
    command:
      - '--path.procfs=/host/proc'
      - '--path.sysfs=/host/sys'
      # Current flag name (the old ignored-mount-points flag is gone). The
      # regex must NOT carry embedded double quotes — each list item is passed
      # verbatim as one argv entry, so quotes would become part of the pattern.
      # $$ is Compose escaping for a literal $.
      - '--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)'

volumes:
  grafana-data:

networks:
  observability:
    driver: bridge

View File

@ -4,14 +4,17 @@ server:
http_listen_port: 3100
ingester:
wal:
enabled: true
chunk_idle_period: 5m
chunk_retain_period: 30s
chunk_target_size: 1048576
max_transfer_retries: 0
lifecycler:
address: 127.0.0.1
ring:
kvstore:
store: inmemory
chunk_idle_period: 5m
chunk_target_size: 1048576
max_transfer_retries: 0
replication_factor: 1
schema_config:
configs:

View File

@ -0,0 +1,16 @@
# Prometheus scrape configuration: self-scrape plus two exporters on 192.168.4.4.
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'prometheus'
    static_configs:
      - targets: ['localhost:9090']

  - job_name: 'node_exporter'
    static_configs:
      - targets: ['192.168.4.4:9100']

  - job_name: 'scaphandre'
    # fallback_scrape_protocol is a per-scrape_config field (Prometheus >= 3.0):
    # it belongs under this job, not at the top level of the file, otherwise
    # the configuration fails to load as an unknown key.
    fallback_scrape_protocol: "PrometheusText1.0.0"
    static_configs:
      - targets: ['192.168.4.4:8080']

View File

@ -0,0 +1,26 @@
# Promtail running next to Loki in the compose stack: ships Docker container
# logs and host syslog files to the local Loki service.
server:
  http_listen_port: 9080
  grpc_listen_port: 0

positions:
  # Read offsets; /tmp is ephemeral, so files are re-read after a restart.
  filename: /tmp/positions.yaml

clients:
  # "loki" resolves via the compose network service name.
  - url: http://loki:3100/loki/api/v1/push

scrape_configs:
  - job_name: docker_logs
    static_configs:
      - targets:
          - localhost
        labels:
          job: docker
          __path__: /var/lib/docker/containers/*/*.log
  - job_name: system_logs
    static_configs:
      - targets:
          - localhost
        labels:
          job: syslog
          __path__: /var/log/*.log

View File

@ -0,0 +1,104 @@
#!/bin/bash
# Installs apache_exporter (Prometheus metrics endpoint on :9117) and Promtail
# (ships Apache access/error logs to Loki), each wrapped in a systemd unit.
# Safe to re-run: downloads overwrite and unit files are rewritten.
set -e

# Variables
APACHE_EXPORTER_VERSION="1.0.10"
PROMTAIL_VERSION="2.9.0"
PROMTAIL_CONFIG="/etc/promtail-config.yaml"
LOKI_URL="http://192.168.56.20:3100/loki/api/v1/push"

echo "=== Installation des dépendances ==="
sudo apt update
sudo apt install -y wget unzip apache2

echo "=== Installation et configuration de apache_exporter ==="
cd /opt
# /opt is root-owned: sudo is required for wget/tar here, otherwise the
# script only works when already run as root.
sudo wget https://github.com/Lusitaniae/apache_exporter/releases/download/v${APACHE_EXPORTER_VERSION}/apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64.tar.gz
sudo tar -xvf apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64.tar.gz
sudo mv apache_exporter-${APACHE_EXPORTER_VERSION}.linux-amd64/apache_exporter /usr/local/bin/

# systemd unit for apache_exporter (Restart=always so it survives crashes,
# matching the promtail unit below)
sudo tee /etc/systemd/system/apache_exporter.service > /dev/null <<EOL
[Unit]
Description=Prometheus Apache Exporter
After=network.target

[Service]
User=root
ExecStart=/usr/local/bin/apache_exporter --scrape_uri="http://localhost/server-status?auto"
Restart=always

[Install]
WantedBy=multi-user.target
EOL

echo "=== Activation du module status d'Apache ==="
sudo a2enmod status
sudo systemctl restart apache2

echo "=== Démarrage du service apache_exporter ==="
sudo systemctl daemon-reload
sudo systemctl enable apache_exporter
sudo systemctl start apache_exporter
sudo systemctl status apache_exporter --no-pager

echo "=== Installation et configuration de Promtail ==="
cd /opt
sudo wget https://github.com/grafana/loki/releases/download/v${PROMTAIL_VERSION}/promtail-linux-amd64.zip
# -o: overwrite without prompting when the script is re-run.
sudo unzip -o promtail-linux-amd64.zip
sudo mv promtail-linux-amd64 /usr/local/bin/promtail
sudo chmod +x /usr/local/bin/promtail

# Promtail configuration (heredoc is unquoted on purpose: ${LOKI_URL} expands)
sudo tee ${PROMTAIL_CONFIG} > /dev/null <<EOL
server:
  http_listen_port: 9080
  grpc_listen_port: 0

positions:
  filename: /var/log/promtail-positions.yaml

clients:
  - url: ${LOKI_URL}

scrape_configs:
  - job_name: apache_access
    static_configs:
      - targets: ['localhost']
        labels:
          job: apache_access
          host: apache_vm
          __path__: /var/log/apache2/access.log
  - job_name: apache_error
    static_configs:
      - targets: ['localhost']
        labels:
          job: apache_error
          host: apache_vm
          __path__: /var/log/apache2/error.log
EOL

# systemd unit for Promtail
sudo tee /etc/systemd/system/promtail.service > /dev/null <<EOL
[Unit]
Description=Promtail service for Apache logs
After=network.target

[Service]
User=root
ExecStart=/usr/local/bin/promtail -config.file ${PROMTAIL_CONFIG}
Restart=always

[Install]
WantedBy=multi-user.target
EOL

echo "=== Démarrage du service Promtail ==="
sudo systemctl daemon-reload
sudo systemctl enable promtail
sudo systemctl start promtail
sudo systemctl status promtail --no-pager

echo "=== Installation terminée ==="

View File

@ -0,0 +1,124 @@
Étape 1 : Installer Apache sur la machine à superviser
Pour vérifier si Apache est déjà installé :
dpkg -l | grep apache2
Si la commande retourne un numéro de version, Apache est déjà installé. Sinon :
Sur Ubuntu/Debian :
sudo apt update
sudo apt install apache2 -y
sudo systemctl enable apache2
sudo systemctl start apache2
sudo systemctl status apache2
Étape 2 : Installer le module de métriques pour Prometheus
Prometheus ne peut pas lire directement les métriques Apache. On utilise apache_exporter.
2.1 Télécharger apache_exporter
cd /opt
sudo wget https://github.com/Lusitaniae/apache_exporter/releases/download/v1.0.10/apache_exporter-1.0.10.linux-amd64.tar.gz
sudo tar -xvf apache_exporter-1.0.10.linux-amd64.tar.gz
sudo mv apache_exporter-1.0.10.linux-amd64/apache_exporter /usr/local/bin/
2.2 Créer un service systemd pour apache_exporter
sudo nano /etc/systemd/system/apache_exporter.service
------------->
[Unit]
Description=Prometheus Apache Exporter
After=network.target
[Service]
User=root
ExecStart=/usr/local/bin/apache_exporter --scrape_uri="http://localhost/server-status?auto"
[Install]
WantedBy=multi-user.target
------------->
2.3 Activer le module status d'Apache
sudo a2enmod status
sudo systemctl restart apache2
2.4 Démarrer apache_exporter
sudo systemctl daemon-reload
sudo systemctl enable apache_exporter
sudo systemctl start apache_exporter
sudo systemctl status apache_exporter
Étape 4 : Collecter les logs Apache via Loki + Promtail
4.1 Installer Promtail sur la machine supervisée
Si Promtail n'est pas encore installé, tu peux le télécharger depuis Grafana :
cd /opt
sudo wget https://github.com/grafana/loki/releases/download/v2.9.0/promtail-linux-amd64.zip
sudo unzip promtail-linux-amd64.zip
sudo mv promtail-linux-amd64 /usr/local/bin/promtail
sudo chmod +x /usr/local/bin/promtail
4.2 Créer un fichier de config pour Promtail
sudo nano /etc/promtail-config.yaml
------------->
server:
http_listen_port: 9080
grpc_listen_port: 0
positions:
filename: /var/log/promtail-positions.yaml
clients:
- url: http://<IP_VM_SUPERVISION>:3100/loki/api/v1/push
scrape_configs:
- job_name: apache_access
static_configs:
- targets: ['localhost']
labels:
job: apache_access
host: apache_vm
__path__: /var/log/apache2/access.log
- job_name: apache_error
static_configs:
- targets: ['localhost']
labels:
job: apache_error
host: apache_vm
__path__: /var/log/apache2/error.log
------------->
4.3 Créer un service systemd pour Promtail
sudo nano /etc/systemd/system/promtail.service
------------->
[Unit]
Description=Promtail service for Apache logs
After=network.target
[Service]
User=root
ExecStart=/usr/local/bin/promtail -config.file /etc/promtail-config.yaml
Restart=always
[Install]
WantedBy=multi-user.target
------------->
sudo systemctl daemon-reload
sudo systemctl enable promtail
sudo systemctl start promtail
sudo systemctl status promtail

View File

@ -0,0 +1,56 @@
# Observability stack (Prometheus, Grafana, Loki, Promtail).
# NOTE: the top-level "version" key is obsolete under Compose v2 (it is
# ignored with a warning) and has been removed.
services:
  prometheus:
    image: prom/prometheus:latest
    container_name: prometheus-observability
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
    ports:
      - "9090:9090"
    networks:
      - observability

  grafana:
    image: grafana/grafana:latest
    container_name: grafana-observability
    ports:
      - "3000:3000"
    volumes:
      - grafana-data:/var/lib/grafana
    environment:
      - GF_SECURITY_ADMIN_USER=admin
      - GF_SECURITY_ADMIN_PASSWORD=admin
    networks:
      - observability
    depends_on:
      - loki

  loki:
    image: grafana/loki:2.8.2
    container_name: loki-observability
    ports:
      - "3100:3100"
    command: -config.file=/etc/loki/local-config.yaml
    volumes:
      - ./loki-config.yaml:/etc/loki/local-config.yaml
      - ./loki-wal:/wal
      - ./loki-chunks:/loki/chunks
      - ./loki-index:/loki/index
    networks:
      - observability

  promtail:
    image: grafana/promtail:2.8.2
    container_name: promtail-observability
    volumes:
      - ./promtail-config.yaml:/etc/promtail/config.yaml
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
      - /var/log:/var/log:ro
    command:
      - -config.file=/etc/promtail/config.yaml
    # Start after Loki so the push endpoint exists (matches the sibling
    # compose file in this repository).
    depends_on:
      - loki
    networks:
      - observability

volumes:
  grafana-data:

networks:
  observability:
    driver: bridge

View File

@ -0,0 +1,59 @@
# Loki 2.8.x single-binary configuration: filesystem storage with
# boltdb-shipper index, WAL enabled, single replica.
auth_enabled: false

server:
  http_listen_port: 3100
  grpc_listen_port: 9096

common:
  path_prefix: /loki
  storage:
    filesystem:
      chunks_directory: /loki/chunks
      rules_directory: /loki/rules
  replication_factor: 1
  ring:
    instance_addr: 127.0.0.1
    kvstore:
      store: inmemory

ingester:
  wal:
    enabled: true
    dir: /wal
    flush_on_shutdown: true
  chunk_idle_period: 5m
  chunk_retain_period: 30s
  max_chunk_age: 1h
  lifecycler:
    ring:
      replication_factor: 1

schema_config:
  configs:
    - from: 2020-10-24
      store: boltdb-shipper
      object_store: filesystem
      schema: v11
      index:
        prefix: index_
        period: 24h

storage_config:
  boltdb_shipper:
    active_index_directory: /loki/index
    # Was /loki/index — the download cache must not share the active index
    # directory, or the shipper's cached files collide with live index files.
    cache_location: /loki/cache
    shared_store: filesystem
  filesystem:
    directory: /loki/chunks

limits_config:
  enforce_metric_name: false
  reject_old_samples: true
  reject_old_samples_max_age: 168h
  ingestion_rate_mb: 10
  ingestion_burst_size_mb: 20

compactor:
  working_directory: /loki/compactor
  shared_store: filesystem
  compaction_interval: 10m

View File

@ -0,0 +1,15 @@
# Prometheus scrape configuration: self-scrape plus node_exporter (:9100)
# and apache_exporter (:9117) on the services VM (192.168.56.31).
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'prometheus'
    static_configs:
      - targets: ['localhost:9090']

  - job_name: 'node_exporter_vmservices'
    static_configs:
      - targets: ['192.168.56.31:9100']

  - job_name: 'apache_vmservices'
    static_configs:
      - targets: ['192.168.56.31:9117']

View File

@ -6,13 +6,14 @@ positions:
filename: /tmp/positions.yaml
clients:
- url: http://loki-observability:3100/loki/api/v1/push # URL du service Loki
- url: http://loki:3100/loki/api/v1/push
scrape_configs:
- job_name: 'docker-containers'
- job_name: docker_logs
static_configs:
- targets:
- localhost
labels:
job: docker
__path__: /var/lib/docker/containers/*/*.log # Chemin vers les logs Docker
__path__: /var/lib/docker/containers/*/*.log