prestation/faceebene/sav1/liveness_original.php
2025-12-05 10:42:46 +00:00

322 lines
13 KiB
PHP
Executable File
Raw Blame History

This file contains invisible Unicode characters

This file contains invisible Unicode characters that are indistinguishable to humans but may be processed differently by a computer. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

<style>
/* Dark dashboard theme for the liveness demo.
   Layout: full-width header, then a two-column main area
   (16:9 video stage on the left, 360px metrics panel on the right). */
:root { font-family: system-ui, -apple-system, Segoe UI, Roboto, Arial, sans-serif; }
body { margin: 0; background: #0f172a; color: #e2e8f0; }
header { padding: 16px 24px; background: #111827; display:flex; align-items:center; gap:12px; }
h1 { font-size: 18px; margin: 0; }
main { display:grid; grid-template-columns: 1fr 360px; gap: 16px; padding: 16px; }
/* Video stage: video and overlay canvas are absolutely stacked inside it. */
.stage { position: relative; aspect-ratio: 16/9; background: #111827; border: 1px solid #1f2937; border-radius: 12px; overflow: hidden; }
video, canvas { position: absolute; inset: 0; width: 100%; height: 100%; object-fit: cover; }
.panel { background: #111827; border: 1px solid #1f2937; border-radius: 12px; padding: 16px; }
.row { display:flex; justify-content: space-between; align-items:center; margin: 8px 0; }
/* Status pills; ok/warn/bad set the text color for liveness states. */
.pill { display:inline-flex; align-items:center; gap:8px; padding:6px 10px; border-radius:999px; border:1px solid #1f2937; font-size:12px; }
.ok { color:#10b981; }
.warn { color:#f59e0b; }
.bad { color:#ef4444; }
button { background:#1f2937; color:#e5e7eb; border:1px solid #374151; border-radius:10px; padding:10px 12px; cursor:pointer; }
button:disabled { opacity:.5; cursor:not-allowed; }
small { color:#94a3b8; }
/* Liveness progress meter; inner div width is driven from JS (0%-100%). */
.meter { height: 8px; border-radius: 999px; background:#0b1220; border:1px solid #1f2937; overflow:hidden; }
.meter > div { height: 100%; background: linear-gradient(90deg,#22c55e,#16a34a); width:0%; }
.grid { display:grid; grid-template-columns: 1fr 1fr; gap:8px; }
.kpi { background:#0b1220; border:1px solid #1f2937; border-radius:10px; padding:10px; }
code { background:#0b1220; padding:2px 6px; border-radius:6px; }
</style>
<header>
<h1>Détection de vivacité (Liveness)</h1>
<!-- Status pills: text content is updated from JS (camStatus / mpStatus). -->
<div class="pill"><span>🎥</span><span id="camStatus">Caméra : inactif</span></div>
<div class="pill"><span>🧠</span><span id="mpStatus">Modèle : non chargé</span></div>
</header>
<main>
<!-- Video stage: live camera feed with a landmark overlay canvas stacked on top. -->
<section class="stage">
<video id="video" playsinline muted></video>
<canvas id="overlay"></canvas>
</section>
<aside class="panel">
<div class="row" style="margin-bottom:8px">
<button id="btnStart">Démarrer</button>
<button id="btnStop" disabled>Arrêter</button>
</div>
<div class="row">
<strong>Statut vivacité</strong>
<span id="liveBadge" class="pill bad">Non vérifié</span>
</div>
<!-- Liveness progress meter; inner bar width is set from updateLiveUI(). -->
<div class="meter" style="margin:8px 0 16px">
<div id="liveMeter"></div>
</div>
<!-- KPI tiles, refreshed every processed frame. -->
<div class="grid">
<div class="kpi"><div>Clignements</div><div id="blinkCount" style="font-size:22px">0</div><small>EAR&lt;seuil</small></div>
<div class="kpi"><div>Mouvements tête</div><div id="headMoves" style="font-size:22px">0</div><small>yaw/roll Δ</small></div>
<div class="kpi"><div>Confiance visage</div><div id="faceScore" style="font-size:22px">0.00</div><small>presence score</small></div>
<div class="kpi"><div>FPS</div><div id="fps" style="font-size:22px">0</div><small>approx</small></div>
</div>
<hr style="border-color:#1f2937; margin:16px 0" />
<!-- Tuning parameters read by the detection loop (EAR threshold, closed-frame
     count, head-move threshold, proofs required).
     NOTE(review): class "sr-only" has no matching rule in the <style> block
     above, so these inputs render as visible number fields — confirm whether
     a .sr-only utility rule is defined elsewhere or was accidentally omitted. -->
<input class="sr-only" id="earThresh" type="number" min="0" max="1" step="0.01" value="0.21">
<input class="sr-only" id="closedFrames" type="number" min="1" max="15" step="1" value="3">
<input class="sr-only" id="moveThresh" type="number" min="0" max="30" step="0.5" value="6">
<input class="sr-only" id="proofNeeded" type="number" min="1" max="10" step="1" value="3">
<hr style="border-color:#1f2937; margin:16px 0" />
<p><strong>Conseils :</strong> placez-vous face caméra, éclairez le visage, clignez des yeux et tournez légèrement la tête.</p>
</aside>
</main>
<!-- MediaPipe Tasks Vision (web) -->
<script type="module">
// -- Dépendances MediaPipe Tasks Vision
import {
FilesetResolver,
FaceLandmarker,
DrawingUtils
} from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision/vision_bundle.js";
// Model URL (hosted by Google)
const MP_FACE_TASK = "https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task";
// DOM references
const video = document.getElementById('video');
const canvas = document.getElementById('overlay');
const ctx = canvas.getContext('2d');
const btnStart = document.getElementById('btnStart');
const btnStop = document.getElementById('btnStop');
const camStatus = document.getElementById('camStatus');
const mpStatus = document.getElementById('mpStatus');
const blinkCountEl = document.getElementById('blinkCount');
const headMovesEl = document.getElementById('headMoves');
const faceScoreEl = document.getElementById('faceScore');
const fpsEl = document.getElementById('fps');
const liveBadge = document.getElementById('liveBadge');
const liveMeter = document.getElementById('liveMeter');
const earThreshEl = document.getElementById('earThresh');
const closedFramesEl= document.getElementById('closedFrames');
const moveThreshEl = document.getElementById('moveThresh');
const proofNeededEl = document.getElementById('proofNeeded');
// State
let running = false; // detection loop active flag
let faceLandmarker; // MediaPipe FaceLandmarker model instance
let lastVideoTime = -1; // last processed video timestamp (skip duplicate frames)
let rafId = null; // requestAnimationFrame handle, cancelled by stop()
// Liveness metrics
let blinkCount = 0; // completed blinks detected so far
let closedConsec = 0; // consecutive frames with EAR below threshold
let lastEAR = 1; // NOTE(review): declared but never updated in this file — possibly dead state
let headMoves = 0; // head movements exceeding the yaw/roll delta threshold
let lastYaw = null, lastRoll = null; // previous head pose for delta comparison
let facePresenceScore = 0; // face-presence confidence proxy (0 or 0.7 in the loop)
let livenessScore = 0; // weighted sum of liveness proofs
let lastFpsT = performance.now(); // start of the current FPS measurement window
let frames = 0; // frames counted in the current FPS window
// MediaPipe FaceMesh landmark indices used for the EAR computation
// (6 points per eye, ordered p1..p6).
// Formula: EAR = (||p2-p6|| + ||p3-p5||) / (2*||p1-p4||)
const LEFT_EYE = [33,160,158,133,153,144];
const RIGHT_EYE = [263,387,385,362,380,373];
// Euclidean distance between two {x, y} points.
const distance = (p, q) => Math.hypot(p.x - q.x, p.y - q.y);
/**
 * Compute the Eye Aspect Ratio for one eye.
 * @param {Array<{x:number,y:number}>} landmarks - full face landmark array
 * @param {number[]} idxs - six landmark indices (p1..p6) for the eye
 * @returns {number} EAR value, or 0 when the horizontal span is not positive
 */
function eyeEAR(landmarks, idxs){
  const pts = idxs.map((i) => landmarks[i]);
  const vertical = distance(pts[1], pts[5]) + distance(pts[2], pts[4]);
  const horizontal = distance(pts[0], pts[3]) * 2;
  // Guard against a degenerate (zero-width) eye span.
  return horizontal > 0 ? vertical / horizontal : 0;
}
// Radians -> degrees.
const rad2deg = (radians) => (radians * 180) / Math.PI;
// Centroid of a list of {x, y} points.
const avgPoint = (pts) => {
  let sx = 0;
  let sy = 0;
  for (const p of pts) {
    sx += p.x;
    sy += p.y;
  }
  return { x: sx / pts.length, y: sy / pts.length };
};
/**
 * Estimate a coarse head pose from face landmarks.
 * roll: tilt of the line joining the two eye-corner centroids.
 * yaw: pseudo-yaw from the nose's horizontal offset relative to the eye midpoint
 * (not a true 3D yaw — based on 2D displacement only).
 * @param {Array<{x:number,y:number}>} landmarks - full face landmark array
 * @returns {{yaw:number, roll:number}} angles in degrees
 */
function estimateYawRoll(landmarks){
  const left = avgPoint([landmarks[33], landmarks[133]]);
  const right = avgPoint([landmarks[263], landmarks[362]]);
  const roll = -rad2deg(Math.atan2(right.y - left.y, right.x - left.x));
  // Fall back to landmark 4 if the nose-tip landmark (1) is missing.
  const nose = landmarks[1] || landmarks[4];
  const midEye = { x: (left.x + right.x) / 2, y: (left.y + right.y) / 2 };
  const yaw = rad2deg(Math.atan2(nose.x - midEye.x, 0.5));
  return { yaw, roll };
}
/**
 * Refresh the metrics panel from the current module state and recompute the
 * liveness score/badge.
 * Scoring: 1 point per blink (capped at 2) + 1 point per head move (capped
 * at 2) + 1 point when face presence is stable (> 0.5). "Alive" is confirmed
 * once the score reaches the user-configured proof count.
 */
function updateLiveUI(){
  blinkCountEl.textContent = String(blinkCount);
  headMovesEl.textContent = String(headMoves);
  faceScoreEl.textContent = facePresenceScore.toFixed(2);
  const blinkPoints = Math.min(blinkCount, 2);
  const movePoints = Math.min(headMoves, 2);
  const presencePoint = facePresenceScore > 0.5 ? 1 : 0;
  livenessScore = blinkPoints + movePoints + presencePoint;
  const proofNeeded = Number(proofNeededEl.value);
  // Meter fills proportionally to the required proof count, capped at 100%.
  const pct = Math.min(100, Math.round(100*livenessScore/Math.max(1, proofNeeded)));
  liveMeter.style.width = pct + '%';
  let badgeClass = 'pill bad';
  let badgeText = 'Non vérifié';
  if (livenessScore >= proofNeeded) {
    badgeClass = 'pill ok';
    badgeText = 'Vivant confirmé';
  } else if (livenessScore > 0) {
    badgeClass = 'pill warn';
    badgeText = 'Indices de vivacité';
  }
  liveBadge.className = badgeClass;
  liveBadge.textContent = badgeText;
}
/**
 * Download and initialize the MediaPipe FaceLandmarker (VIDEO mode, single
 * face), storing it in the module-level `faceLandmarker`. Updates the model
 * status pill before and after loading.
 */
async function loadModel(){
  mpStatus.textContent = 'Modèle : chargement…';
  // WASM runtime files from the jsDelivr CDN — MediaPipe resolves its own
  // dependencies from this root.
  const wasmRoot = 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.11/wasm';
  const resolver = await FilesetResolver.forVisionTasks(wasmRoot);
  const options = {
    baseOptions: { modelAssetPath: MP_FACE_TASK },
    runningMode: 'VIDEO',
    numFaces: 1,
    outputFaceBlendshapes: false,
    outputFacialTransformationMatrixes: true
  };
  faceLandmarker = await FaceLandmarker.createFromOptions(resolver, options);
  mpStatus.textContent = 'Modèle : prêt';
}
/**
 * Start the liveness session: load the model (once), open the user-facing
 * camera, size the overlay canvas to the video, and kick off the detection
 * loop. On failure, logs the error, shows it in the camera status pill, and
 * re-enables the Start button.
 */
async function start(){
  try{
    btnStart.disabled = true;
    // Load the landmarker only once and reuse it across start/stop cycles;
    // the original reloaded the model on every Start, creating a fresh
    // FaceLandmarker (and its WASM instance) each time.
    if(!faceLandmarker){
      await loadModel();
    }
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { facingMode: 'user', width: {ideal: 1280}, height:{ideal:720} },
      audio: false
    });
    video.srcObject = stream;
    await video.play();
    camStatus.textContent = 'Caméra : OK';
    running = true;
    // Fall back to the requested resolution if video metadata is not ready.
    canvas.width = video.videoWidth || 1280;
    canvas.height= video.videoHeight|| 720;
    btnStop.disabled = false;
    lastVideoTime = -1;
    loop();
  }catch(err){
    console.error(err);
    camStatus.textContent = 'Caméra : échec ('+ (err?.name||'Erreur') +')';
    btnStart.disabled = false;
  }
}
/**
 * Stop the liveness session: halt the detection loop, release all camera
 * tracks, detach the stream from the video element, and reset the buttons
 * and status pills. The loaded model is kept for the next session.
 */
function stop(){
  running = false;
  if (rafId) cancelAnimationFrame(rafId);
  const activeStream = video.srcObject;
  if (activeStream) {
    for (const track of activeStream.getTracks()) {
      track.stop();
    }
  }
  video.srcObject = null;
  btnStart.disabled = false;
  btnStop.disabled = true;
  camStatus.textContent = 'Caméra : inactif';
  mpStatus.textContent = faceLandmarker ? 'Modèle : prêt' : 'Modèle : non chargé';
}
/**
 * Clear the overlay canvas and draw the 12 eye landmarks as green dots.
 * @param {Array<{x:number,y:number}>} landmarks - full face landmark array
 */
function drawLandmarks(landmarks){
  ctx.clearRect(0,0,canvas.width, canvas.height);
  const drawUtils = new DrawingUtils(ctx);
  // Select the eye points directly by index: the original filtered the whole
  // landmark array with Array.includes per element (O(n*k) every frame).
  const eyePts = [...LEFT_EYE, ...RIGHT_EYE].map((i) => landmarks[i]);
  drawUtils.drawLandmarks(eyePts, {lineWidth: 2, color: '#22c55e', radius: 2});
}
/**
 * Per-frame detection loop, driven by requestAnimationFrame.
 * Runs the landmarker only when the video has advanced to a new frame, then
 * updates blink / head-movement / presence metrics, the overlay, the UI, and
 * an approximate FPS counter. Reschedules itself until `running` is false.
 */
function loop(){
  if(!running) return;
  // Skip detection when the video frame has not advanced since last call.
  const nowVideoTime = video.currentTime;
  if(nowVideoTime !== lastVideoTime){
    lastVideoTime = nowVideoTime;
    const res = faceLandmarker.detectForVideo(video, performance.now());
    const faces = res.faceLandmarks || [];
    if(faces.length){
      const lm = faces[0];
      drawLandmarks(lm);
      // Average the EAR of both eyes; a blink is counted when the eyes stay
      // below the threshold for `closedNeeded` consecutive frames and reopen.
      const earL = eyeEAR(lm, LEFT_EYE);
      const earR = eyeEAR(lm, RIGHT_EYE);
      const ear = (earL + earR)/2;
      const earThresh = Number(earThreshEl.value);
      const closedNeeded = Number(closedFramesEl.value);
      if(ear < earThresh){
        closedConsec++;
      } else {
        if(closedConsec >= closedNeeded){
          blinkCount++;
        }
        closedConsec = 0;
      }
      // Head movement: count frames where yaw or roll changed by more than
      // the configured threshold since the previous frame.
      const {yaw, roll} = estimateYawRoll(lm);
      const moveThresh = Number(moveThreshEl.value);
      if(lastYaw!==null && lastRoll!==null){
        const dYaw = Math.abs(yaw - lastYaw);
        const dRoll= Math.abs(roll - lastRoll);
        if(dYaw > moveThresh || dRoll > moveThresh){
          headMoves++;
        }
      }
      lastYaw = yaw; lastRoll = roll;
      // Face presence: simplified constant proxy while a face is detected.
      facePresenceScore = 0.7;
    } else {
      ctx.clearRect(0,0,canvas.width, canvas.height);
      facePresenceScore = 0.0;
    }
    // Refresh metrics panel and liveness badge.
    updateLiveUI();
  }
  // Approximate FPS: frames handled during the last ~1s window.
  // (The original also captured unused startT/endT timestamps for optional
  // profiling; that dead code has been removed.)
  frames++;
  const t = performance.now();
  if(t - lastFpsT > 1000){
    fpsEl.textContent = String(frames);
    frames = 0; lastFpsT = t;
  }
  rafId = requestAnimationFrame(loop);
}
// Wire the Start/Stop buttons to the session lifecycle.
btnStart.addEventListener('click', start);
btnStop.addEventListener('click', stop);
// Tip: optionally pre-request camera permission on page load (then release it):
// navigator.mediaDevices.getUserMedia({video:true}).then(s=>s.getTracks().forEach(t=>t.stop()));
</script>
</body>
</html>