"use client"; import { motion, useScroll, useTransform } from "framer-motion"; import { Brain, Search, Database, Zap, Shield, TrendingUp, CheckCircle, Activity, Cpu, Network, } from "lucide-react"; import { useRef, useEffect, useState } from "react"; export default function AIDiagnosis() { const containerRef = useRef(null); const canvasRef = useRef(null); const [modelLoaded, setModelLoaded] = useState(false); const [scanProgress, setScanProgress] = useState(0); const { scrollYProgress } = useScroll({ target: containerRef, offset: ["start end", "end start"], }); const y = useTransform(scrollYProgress, [0, 0.5], [100, 0]); const opacity = useTransform(scrollYProgress, [0, 0.3], [0, 1]); const scale = useTransform(scrollYProgress, [0, 0.5], [0.9, 1]); // Simulate scan progress useEffect(() => { const interval = setInterval(() => { setScanProgress((prev) => (prev >= 100 ? 0 : prev + 1)); }, 50); return () => clearInterval(interval); }, []); // Three.js setup useEffect(() => { if (typeof window === "undefined" || !canvasRef.current) return; let animationId: number | undefined; let scene: any; let camera: any; let renderer: any; let model: any; let controls: any; const initThreeJS = async () => { const THREE = await import("three"); const { GLTFLoader, OrbitControls } = await import("three-stdlib"); scene = new THREE.Scene(); camera = new THREE.PerspectiveCamera(45, 1, 0.1, 1000); camera.position.set(0, 0, 5); renderer = new THREE.WebGLRenderer({ canvas: canvasRef.current!, alpha: true, antialias: true, }); renderer.setSize(500, 500); renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2)); renderer.setClearColor(0x000000, 0); // Lighting scene.add(new THREE.AmbientLight(0xffffff, 0.8)); const dirLight1 = new THREE.DirectionalLight(0x6366f1, 1.2); dirLight1.position.set(5, 5, 5); scene.add(dirLight1); const dirLight2 = new THREE.DirectionalLight(0xa855f7, 0.8); dirLight2.position.set(-5, 3, -5); scene.add(dirLight2); // Controls controls = new OrbitControls(camera, renderer.domElement); controls.enableDamping = true; controls.dampingFactor = 0.05; controls.enableZoom = false; controls.autoRotate = true; controls.autoRotateSpeed = 1.5; // Model loading const loader = new GLTFLoader(); const modelURL = "/human_body.glb"; try { const gltf = await loader.loadAsync(modelURL); model = gltf.scene; // Center & scale model const box = new THREE.Box3().setFromObject(model); const center = box.getCenter(new THREE.Vector3()); const size = box.getSize(new THREE.Vector3()); const maxDim = Math.max(size.x, size.y, size.z); const scaleFactor = 3 / maxDim; model.scale.setScalar(scaleFactor); model.position.sub(center.multiplyScalar(scaleFactor)); // Performance: disable shadows model.traverse((child: any) => { if (child.isMesh) { child.castShadow = false; child.receiveShadow = false; } }); scene.add(model); setModelLoaded(true); } catch (error) { console.error("Failed to load GLB model → using fallback", error); const geometry = new THREE.IcosahedronGeometry(1.5, 2); const material = new THREE.MeshStandardMaterial({ color: 0x6366f1, roughness: 0.3, metalness: 0.7, emissive: 0x3b82f6, emissiveIntensity: 0.2, }); model = new THREE.Mesh(geometry, material); scene.add(model); setModelLoaded(true); } // Animation loop const animate = () => { animationId = requestAnimationFrame(animate); controls.update(); if (model) model.rotation.y += 0.002; renderer.render(scene, camera); }; animate(); }; initThreeJS(); // Cleanup return () => { if (animationId !== undefined) { cancelAnimationFrame(animationId); } if 
(controls) controls.dispose(); if (renderer) renderer.dispose(); if (scene) { scene.traverse((obj: any) => { if (obj.geometry) obj.geometry.dispose(); if (obj.material) { if (Array.isArray(obj.material)) { obj.material.forEach((m: any) => m.dispose()); } else { obj.material.dispose(); } } }); } }; }, []); return (
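  // A minimal sketch, not part of the original component: the renderer above
  // draws into a fixed 500x500 buffer. If the canvas container were fluid, a
  // ResizeObserver effect along these lines could keep the renderer and camera
  // in sync. `rendererRef` and `cameraRef` are hypothetical refs that would be
  // assigned inside initThreeJS:
  //
  //   useEffect(() => {
  //     const parent = canvasRef.current?.parentElement;
  //     if (!parent) return;
  //     const observer = new ResizeObserver(([entry]) => {
  //       const { width, height } = entry.contentRect;
  //       rendererRef.current?.setSize(width, height);
  //       if (cameraRef.current) {
  //         cameraRef.current.aspect = width / height;
  //         cameraRef.current.updateProjectionMatrix();
  //       }
  //     });
  //     observer.observe(parent);
  //     return () => observer.disconnect();
  //   }, []);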
  return (
    <section ref={containerRef} className="relative overflow-hidden py-24">
      {/* NOTE: element structure below is reconstructed from the section
          comments; utility classes are representative, not pixel-exact. */}

      {/* Ambient glows */}
      <div className="pointer-events-none absolute left-1/4 top-0 h-96 w-96 rounded-full bg-indigo-500/20 blur-3xl" />
      <div className="pointer-events-none absolute bottom-0 right-1/4 h-96 w-96 rounded-full bg-purple-500/20 blur-3xl" />

      {/* Subtle grid */}
      <div
        className="pointer-events-none absolute inset-0 opacity-10"
        style={{
          backgroundImage:
            "radial-gradient(circle, rgba(255,255,255,0.2) 1px, transparent 1px)",
          backgroundSize: "32px 32px",
        }}
      />

      <motion.div
        style={{ y, opacity, scale }}
        className="relative z-10 mx-auto grid max-w-7xl gap-16 px-6 lg:grid-cols-2"
      >
        {/* 3D Viewer Column */}
        <div className="rounded-3xl border border-white/10 bg-white/5 p-6">
          {/* Header */}
          <div className="flex items-center justify-between">
            <div className="flex items-center gap-3">
              <Brain className="h-8 w-8 text-indigo-400" />
              <div>
                <h3 className="font-semibold text-white">
                  Neural Diagnostic Engine
                </h3>
                <p className="text-sm text-slate-400">
                  Real-time Analysis Active
                </p>
              </div>
            </div>
            <span className="rounded-full bg-green-500/10 px-3 py-1 text-xs text-green-400">
              Live
            </span>
          </div>

          {/* Canvas area */}
          <div className="relative mx-auto mt-6 aspect-square w-full max-w-[500px]">
            {!modelLoaded && (
              <div className="absolute inset-0 flex flex-col items-center justify-center gap-3">
                <div className="h-10 w-10 animate-spin rounded-full border-2 border-indigo-500 border-t-transparent" />
                <p className="text-sm text-slate-400">
                  Initializing Neural Network...
                </p>
              </div>
            )}
            {/* The canvas stays mounted so the Three.js effect can bind to it on mount. */}
            <canvas
              ref={canvasRef}
              className={`h-full w-full ${modelLoaded ? "" : "invisible"}`}
            />
          </div>

          {/* Status footer */}
          <div className="mt-6 space-y-4">
            <div className="flex items-center justify-between text-sm">
              <span className="text-slate-400">Analysis Progress</span>
              <span className="font-medium text-white">{scanProgress}%</span>
            </div>
            <div className="h-1.5 overflow-hidden rounded-full bg-white/10">
              <div
                className="h-full rounded-full bg-gradient-to-r from-indigo-500 to-purple-500"
                style={{ width: `${scanProgress}%` }}
              />
            </div>

            <div className="grid grid-cols-3 gap-3">
              {[
                { icon: Cpu, label: "Processing", value: "98.4%", color: "text-blue-400" },
                { icon: Network, label: "Neural Load", value: "76.2%", color: "text-purple-400" },
                { icon: Activity, label: "Accuracy", value: "99.8%", color: "text-green-400" },
              ].map((stat, idx) => (
                <div
                  key={idx}
                  className="rounded-xl border border-white/10 bg-white/5 p-3 text-center"
                >
                  <stat.icon className={`mx-auto h-4 w-4 ${stat.color}`} />
                  <p className="mt-1 text-xs text-slate-400">{stat.label}</p>
                  <p className={`text-sm font-semibold ${stat.color}`}>
                    {stat.value}
                  </p>
                </div>
              ))}
            </div>

            <p className="text-center text-xs text-slate-500">
              Drag to rotate • Interactive 3D visualization
            </p>
          </div>
        </div>

        {/* Text / Features Column */}
        <div className="flex flex-col justify-center">
          <h2 className="text-4xl font-bold text-white lg:text-5xl">
            Neural Clinical{" "}
            <span className="bg-gradient-to-r from-indigo-400 to-purple-400 bg-clip-text text-transparent">
              Synthesis
            </span>
          </h2>

          <p className="mt-6 text-lg text-slate-300">
            Enterprise-grade diagnostic assistance powered by advanced AI.
            Cross-reference clinical patterns against{" "}
            <span className="font-medium text-white">
              global medical databases
            </span>{" "}
            with{" "}
            <span className="font-medium text-white">sub-second latency</span>{" "}
            and unparalleled accuracy.
          </p>

          {/* Feature Cards */}
          <div className="mt-10 grid gap-4 sm:grid-cols-2">
            {[
              {
                icon: Search,
                title: "Pattern Recognition",
                text: "AI-powered clinical marker identification with zero-bias analysis technology.",
                color: "from-blue-500 to-cyan-500",
                iconColor: "text-blue-400",
              },
              {
                icon: Database,
                title: "Medical Database",
                text: "Instant access to verified institutional medical models and research.",
                color: "from-purple-500 to-pink-500",
                iconColor: "text-purple-400",
              },
            ].map((item, i) => (
              <div
                key={i}
                className="relative overflow-hidden rounded-2xl border border-white/10 bg-white/5 p-5"
              >
                <div className={`absolute inset-x-0 top-0 h-0.5 bg-gradient-to-r ${item.color}`} />
                <item.icon className={`h-6 w-6 ${item.iconColor}`} />
                <h4 className="mt-3 font-semibold text-white">{item.title}</h4>
                <p className="mt-1 text-sm text-slate-400">{item.text}</p>
              </div>
            ))}
          </div>

          {/* Stats */}
          <div className="mt-8 grid grid-cols-3 gap-4">
            {[
              { icon: Zap, value: "<500ms", label: "Query Time" },
              { icon: Shield, value: "100%", label: "Secure" },
              { icon: TrendingUp, value: "99.8%", label: "Accuracy" },
            ].map((stat, i) => (
              <div
                key={i}
                className="rounded-2xl border border-white/10 bg-white/5 p-4 text-center"
              >
                <stat.icon className="mx-auto h-5 w-5 text-indigo-400" />
                <p className="mt-2 text-xl font-bold text-white">{stat.value}</p>
                <p className="text-xs text-slate-400">{stat.label}</p>
              </div>
            ))}
          </div>

          {/* Trust Indicators */}
          <div className="mt-8 flex flex-wrap gap-3">
            {["FDA Compliant", "HIPAA Certified", "ISO 27001"].map((badge, i) => (
              <span
                key={i}
                className="flex items-center gap-1.5 rounded-full border border-white/10 bg-white/5 px-3 py-1 text-xs text-slate-300"
              >
                <CheckCircle className="h-3.5 w-3.5 text-green-400" />
                {badge}
              </span>
            ))}
          </div>
        </div>
      </motion.div>
    </section>
  );
}
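// Usage sketch (assumption: the route and import alias below are illustrative,
// not taken from this repo). Because the file opts into "use client", the
// component can be rendered directly from a Next.js App Router page:
//
//   // app/diagnosis/page.tsx
//   import AIDiagnosis from "@/components/AIDiagnosis";
//
//   export default function Page() {
//     return <AIDiagnosis />;
//   }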