'use client'

import React, { useEffect, useRef, useState } from 'react'
import {
  Engine,
  Scene,
  Vector3,
  HemisphericLight,
  ArcRotateCamera,
  Color3,
  Color4,
  AbstractMesh,
  Nullable,
  HighlightLayer,
  Mesh,
  InstancedMesh,
  Animation,
  CubicEase,
  EasingFunction,
  Matrix,
  Viewport,
  ImportMeshAsync,
} from '@babylonjs/core'
import '@babylonjs/loaders'
import LoadingSpinner from '../ui/LoadingSpinner'

export interface ModelViewerProps {
  /** URL/path of the glTF/GLB model to load. An empty string is a valid "nothing selected" state. */
  modelPath: string
  /** Called once per successful load with the imported meshes and their world-space bounding box. */
  onModelLoaded?: (modelData: {
    meshes: AbstractMesh[]
    boundingBox: {
      min: { x: number; y: number; z: number }
      max: { x: number; y: number; z: number }
    }
  }) => void
  /** Called with a human-readable (Russian) message when loading fails or the model has no geometry. */
  onError?: (error: string) => void
  /** Sensor_ID to focus/highlight; null/empty clears the focus. */
  focusSensorId?: string | null
  /** Optional custom overlay renderer; receives the projected screen anchor and the chosen mesh info. */
  renderOverlay?: (params: {
    anchor: { left: number; top: number } | null
    info?: { name?: string; sensorId?: string } | null
  }) => React.ReactNode
}

/**
 * Babylon.js viewer for glTF models with optional sensor focus/highlight.
 *
 * Lifecycle:
 *  - one effect creates the Engine/Scene/camera/lights/HighlightLayer (once per mount);
 *  - one effect (re)loads the model whenever `modelPath` changes;
 *  - one effect focuses the camera on the mesh matching `focusSensorId`;
 *  - one effect projects the focused mesh's center to screen space every frame
 *    so an HTML overlay can be anchored next to it.
 */
const ModelViewer: React.FC<ModelViewerProps> = ({
  modelPath,
  onModelLoaded,
  onError,
  focusSensorId,
  renderOverlay,
}) => {
  const canvasRef = useRef<HTMLCanvasElement>(null)
  const engineRef = useRef<Nullable<Engine>>(null)
  const sceneRef = useRef<Nullable<Scene>>(null)
  const [isLoading, setIsLoading] = useState(false)
  const [loadingProgress, setLoadingProgress] = useState(0)
  const [showModel, setShowModel] = useState(false)
  const isInitializedRef = useRef(false)
  const isDisposedRef = useRef(false)
  const importedMeshesRef = useRef<AbstractMesh[]>([])
  const highlightLayerRef = useRef<Nullable<HighlightLayer>>(null)
  const chosenMeshRef = useRef<Nullable<AbstractMesh>>(null)
  const [overlayPos, setOverlayPos] = useState<{ left: number; top: number } | null>(null)
  const [overlayData, setOverlayData] = useState<{ name?: string; sensorId?: string } | null>(null)
  const [modelReady, setModelReady] = useState(false)

  // Reset lifecycle flags on mount; mark disposed on unmount so async callbacks bail out.
  useEffect(() => {
    isDisposedRef.current = false
    isInitializedRef.current = false
    return () => {
      isDisposedRef.current = true
    }
  }, [])

  // One-time Babylon setup: engine, render loop, scene, camera, lights, highlight layer.
  useEffect(() => {
    if (!canvasRef.current || isInitializedRef.current) return

    const canvas = canvasRef.current
    const engine = new Engine(canvas, true)
    engineRef.current = engine

    engine.runRenderLoop(() => {
      if (!isDisposedRef.current && sceneRef.current) {
        sceneRef.current.render()
      }
    })

    const scene = new Scene(engine)
    sceneRef.current = scene
    scene.clearColor = new Color4(0.1, 0.1, 0.15, 1)

    const camera = new ArcRotateCamera('camera', 0, Math.PI / 3, 20, Vector3.Zero(), scene)
    camera.attachControl(canvas, true)
    camera.lowerRadiusLimit = 2
    camera.upperRadiusLimit = 200
    camera.wheelDeltaPercentage = 0.01
    camera.panningSensibility = 50
    camera.angularSensibilityX = 1000
    camera.angularSensibilityY = 1000

    // Three-light rig: soft ambient, warm key, cool fill.
    const ambientLight = new HemisphericLight('ambientLight', new Vector3(0, 1, 0), scene)
    ambientLight.intensity = 0.4
    ambientLight.diffuse = new Color3(0.7, 0.7, 0.8)
    ambientLight.specular = new Color3(0.2, 0.2, 0.3)
    ambientLight.groundColor = new Color3(0.3, 0.3, 0.4)

    const keyLight = new HemisphericLight('keyLight', new Vector3(1, 1, 0), scene)
    keyLight.intensity = 0.6
    keyLight.diffuse = new Color3(1, 1, 0.9)
    keyLight.specular = new Color3(1, 1, 0.9)

    const fillLight = new HemisphericLight('fillLight', new Vector3(-1, 0.5, -1), scene)
    fillLight.intensity = 0.3
    fillLight.diffuse = new Color3(0.8, 0.8, 1)

    const hl = new HighlightLayer('highlight-layer', scene)
    highlightLayerRef.current = hl

    const handleResize = () => {
      if (!isDisposedRef.current) {
        engine.resize()
      }
    }
    window.addEventListener('resize', handleResize)

    isInitializedRef.current = true

    return () => {
      isDisposedRef.current = true
      isInitializedRef.current = false
      window.removeEventListener('resize', handleResize)
      highlightLayerRef.current?.dispose()
      highlightLayerRef.current = null
      if (engineRef.current) {
        // Engine.dispose() also disposes the scene it owns.
        engineRef.current.dispose()
        engineRef.current = null
      }
      sceneRef.current = null
    }
  }, [])

  // (Re)load the model whenever modelPath changes.
  useEffect(() => {
    if (!isInitializedRef.current || isDisposedRef.current) {
      return
    }

    if (!modelPath || modelPath.trim() === '') {
      console.warn('[ModelViewer] No model path provided')
      // Do not call onError for an empty path — that's a normal state during initialization.
      setIsLoading(false)
      return
    }

    // Real cancellation flag: flipped by the effect cleanup when modelPath changes
    // or the component unmounts, so a stale load cannot apply state.
    let cancelled = false

    const loadModel = async () => {
      if (!sceneRef.current || isDisposedRef.current || cancelled) {
        return
      }

      const currentModelPath = modelPath
      console.log('[ModelViewer] Starting model load:', currentModelPath)
      setIsLoading(true)
      setLoadingProgress(0)
      setShowModel(false)
      setModelReady(false)

      // Dispose every previously imported mesh before loading the new model.
      // NOTE(review): the uniqueId comparison against the camera's uniqueId is kept
      // from the original; meshes and cameras have distinct uniqueIds, so in practice
      // all meshes are disposed — confirm this filter is intentional.
      const oldMeshes = sceneRef.current.meshes.slice()
      const activeCameraId = sceneRef.current.activeCamera?.uniqueId
      console.log('[ModelViewer] Cleaning up old meshes. Total:', oldMeshes.length)
      oldMeshes.forEach(m => {
        if (m.uniqueId !== activeCameraId) {
          m.dispose()
        }
      })

      console.log('[ModelViewer] Loading GLTF model:', currentModelPath)

      // Fake progress for the loading UI (ImportMeshAsync gives no progress here);
      // self-clears once it reaches 90%.
      const progressInterval = setInterval(() => {
        setLoadingProgress(prev => {
          if (prev >= 90) {
            clearInterval(progressInterval)
            return 90
          }
          return prev + Math.random() * 15
        })
      }, 100)

      try {
        console.log('[ModelViewer] Calling ImportMeshAsync with path:', currentModelPath)

        // Best-effort availability probe for diagnostics; failures are logged, not fatal.
        try {
          const testResponse = await fetch(currentModelPath, { method: 'HEAD' })
          console.log('[ModelViewer] File availability check:', {
            url: currentModelPath,
            status: testResponse.status,
            statusText: testResponse.statusText,
            ok: testResponse.ok,
          })
        } catch (fetchError) {
          console.error('[ModelViewer] File fetch error:', fetchError)
        }

        const result = await ImportMeshAsync(currentModelPath, sceneRef.current)
        console.log('[ModelViewer] ImportMeshAsync completed successfully')
        console.log('[ModelViewer] Import result:', {
          meshesCount: result.meshes.length,
          particleSystemsCount: result.particleSystems.length,
          skeletonsCount: result.skeletons.length,
          animationGroupsCount: result.animationGroups.length,
        })

        if (isDisposedRef.current || cancelled) {
          console.log('[ModelViewer] Model loading aborted - model changed during load')
          clearInterval(progressInterval)
          setIsLoading(false)
          return
        }

        importedMeshesRef.current = result.meshes
        clearInterval(progressInterval)
        setLoadingProgress(100)
        console.log('[ModelViewer] GLTF Model loaded successfully!', result)

        if (result.meshes.length > 0) {
          // Frame the camera around the whole hierarchy of the first imported mesh.
          const boundingBox = result.meshes[0].getHierarchyBoundingVectors()
          const size = boundingBox.max.subtract(boundingBox.min)
          const maxDimension = Math.max(size.x, size.y, size.z)
          const camera = sceneRef.current!.activeCamera as ArcRotateCamera
          camera.radius = maxDimension * 2
          camera.target = result.meshes[0].position

          importedMeshesRef.current = result.meshes
          setModelReady(true)
          onModelLoaded?.({
            meshes: result.meshes,
            boundingBox: { min: boundingBox.min, max: boundingBox.max },
          })

          // Reveal the model after a short delay so the fade-in reads smoothly.
          setTimeout(() => {
            if (!isDisposedRef.current && !cancelled) {
              setShowModel(true)
              setIsLoading(false)
            } else {
              console.log('Model display aborted - model changed during animation')
            }
          }, 500)
        } else {
          console.warn('No meshes found in model')
          onError?.('В модели не найдена геометрия')
          setIsLoading(false)
        }
      } catch (error) {
        clearInterval(progressInterval)
        // Only report the error if this load is still the current one.
        if (!isDisposedRef.current && !cancelled) {
          console.error('Error loading GLTF model:', error)
          const errorMessage = error instanceof Error ? error.message : String(error)
          onError?.(`Ошибка загрузки модели: ${errorMessage}`)
        } else {
          console.log('Error occurred but loading was aborted - model changed')
        }
        setIsLoading(false)
      }
    }

    // Defer the load so the spinner paints first; fall back to setTimeout
    // where requestIdleCallback is unavailable (e.g. Safari).
    if (typeof requestIdleCallback === 'function') {
      requestIdleCallback(() => loadModel(), { timeout: 50 })
    } else {
      setTimeout(() => loadModel(), 0)
    }

    return () => {
      cancelled = true
    }
  }, [modelPath, onError, onModelLoaded])

  // Focus/highlight the mesh whose metadata matches focusSensorId.
  useEffect(() => {
    if (!sceneRef.current || isDisposedRef.current || !modelReady) return

    const sensorId = (focusSensorId ?? '').trim()
    if (!sensorId) {
      console.log('[ModelViewer] Focus cleared (no Sensor_ID provided)')
      highlightLayerRef.current?.removeAllMeshes()
      chosenMeshRef.current = null
      setOverlayPos(null)
      setOverlayData(null)
      return
    }

    const allMeshes = importedMeshesRef.current || []
    if (allMeshes.length === 0) {
      console.warn('[ModelViewer] No meshes available for sensor matching')
      highlightLayerRef.current?.removeAllMeshes()
      chosenMeshRef.current = null
      setOverlayPos(null)
      setOverlayData(null)
      return
    }

    // Candidate set: any mesh whose id or name mentions IfcSensor.
    const sensorMeshes = allMeshes.filter((m: any) => {
      try {
        return ((m.id ?? '').includes('IfcSensor') || (m.name ?? '').includes('IfcSensor'))
      } catch (error) {
        console.warn('[ModelViewer] Error filtering sensor mesh:', error)
        return false
      }
    })

    // Match by Sensor_ID stored in glTF extras, either directly or inside a
    // JSON-encoded bonsai MonitoringSensor_Instance pset.
    const chosen = sensorMeshes.find((m: any) => {
      try {
        const meta: any = (m as any)?.metadata
        const extras: any = meta?.gltf?.extras ?? meta?.extras ?? (m as any)?.extras
        const sid =
          extras?.Sensor_ID ??
          extras?.sensor_id ??
          extras?.SERIAL_NUMBER ??
          extras?.serial_number
        if (sid != null) {
          return String(sid).trim() === sensorId
        }
        const monitoringSensorInstance = extras?.bonsaiPset_ARBM_PSet_MonitoringSensor_Instance
        if (monitoringSensorInstance && typeof monitoringSensorInstance === 'string') {
          try {
            const parsedInstance = JSON.parse(monitoringSensorInstance)
            const instanceSensorId = parsedInstance?.Sensor_ID
            if (instanceSensorId != null) {
              return String(instanceSensorId).trim() === sensorId
            }
          } catch (parseError) {
            console.warn('[ModelViewer] Error parsing MonitoringSensor_Instance JSON:', parseError)
          }
        }
        return false
      } catch (error) {
        console.warn('[ModelViewer] Error matching sensor mesh:', error)
        return false
      }
    })

    console.log('[ModelViewer] Sensor focus', {
      requested: sensorId,
      totalImportedMeshes: allMeshes.length,
      totalSensorMeshes: sensorMeshes.length,
      chosen: chosen
        ? { id: chosen.id, name: chosen.name, uniqueId: chosen.uniqueId, parent: chosen.parent?.name }
        : null,
      source: 'result.meshes',
    })

    const scene = sceneRef.current!
    if (chosen) {
      try {
        const camera = scene.activeCamera as ArcRotateCamera
        // Prefer the hierarchy bounding box; fall back to the mesh's own bounds.
        const bbox = (typeof chosen.getHierarchyBoundingVectors === 'function')
          ? chosen.getHierarchyBoundingVectors()
          : {
              min: chosen.getBoundingInfo().boundingBox.minimumWorld,
              max: chosen.getBoundingInfo().boundingBox.maximumWorld,
            }
        const center = bbox.min.add(bbox.max).scale(0.5)
        const size = bbox.max.subtract(bbox.min)
        const maxDimension = Math.max(size.x, size.y, size.z)
        const targetRadius = Math.max(camera.lowerRadiusLimit ?? 2, maxDimension * 1.5)

        // Animate the camera (target + radius) toward the sensor over ~600 ms.
        scene.stopAnimation(camera)
        const ease = new CubicEase()
        ease.setEasingMode(EasingFunction.EASINGMODE_EASEINOUT)
        const frameRate = 60
        const durationMs = 600
        const totalFrames = Math.round((durationMs / 1000) * frameRate)
        Animation.CreateAndStartAnimation(
          'camTarget', camera, 'target', frameRate, totalFrames,
          camera.target.clone(), center.clone(), Animation.ANIMATIONLOOPMODE_CONSTANT, ease,
        )
        Animation.CreateAndStartAnimation(
          'camRadius', camera, 'radius', frameRate, totalFrames,
          camera.radius, targetRadius, Animation.ANIMATIONLOOPMODE_CONSTANT, ease,
        )

        // Highlight: plain Mesh directly; InstancedMesh via its source; otherwise
        // highlight every Mesh child of the node.
        const hl = highlightLayerRef.current
        if (hl) {
          hl.removeAllMeshes()
          if (chosen instanceof Mesh) {
            hl.addMesh(chosen, new Color3(1, 1, 0))
          } else if (chosen instanceof InstancedMesh) {
            hl.addMesh(chosen.sourceMesh, new Color3(1, 1, 0))
          } else {
            const children = typeof (chosen as any)?.getChildMeshes === 'function'
              ? (chosen as any).getChildMeshes()
              : []
            for (const cm of children) {
              if (cm instanceof Mesh) {
                hl.addMesh(cm, new Color3(1, 1, 0))
              }
            }
          }
        }

        chosenMeshRef.current = chosen
        setOverlayData({ name: chosen.name, sensorId })
      } catch (error) {
        console.error('[ModelViewer] Error focusing on sensor mesh:', error)
        highlightLayerRef.current?.removeAllMeshes()
        chosenMeshRef.current = null
        setOverlayPos(null)
        setOverlayData(null)
      }
    } else {
      highlightLayerRef.current?.removeAllMeshes()
      chosenMeshRef.current = null
      setOverlayPos(null)
      setOverlayData(null)
    }
  }, [focusSensorId, modelReady])

  // Project the focused mesh's world center to screen coordinates after every
  // render so the HTML overlay tracks it while the camera moves.
  useEffect(() => {
    const scene = sceneRef.current
    if (!scene || isDisposedRef.current) return

    const observer = scene.onAfterRenderObservable.add(() => {
      const chosen = chosenMeshRef.current
      if (!chosen) return
      try {
        const engine = scene.getEngine()
        const cam = scene.activeCamera
        if (!cam) return
        const center = chosen.getBoundingInfo().boundingBox.centerWorld
        const world = Matrix.IdentityReadOnly
        const transform = scene.getTransformMatrix()
        const viewport = new Viewport(0, 0, engine.getRenderWidth(), engine.getRenderHeight())
        const projected = Vector3.Project(center, world, transform, viewport)
        setOverlayPos({ left: projected.x, top: projected.y })
      } catch (error) {
        console.warn('[ModelViewer] Error updating overlay position:', error)
        setOverlayPos(null)
      }
    })

    return () => {
      scene.onAfterRenderObservable.remove(observer)
    }
  }, [])

  // NOTE(review): the original JSX markup was lost in this copy of the file —
  // only its text content survived. The structure below is a reconstruction
  // from the surviving expressions and state usage; confirm against VCS history.
  return (
    <div style={{ position: 'relative', width: '100%', height: '100%' }}>
      {/* The canvas must always be mounted: the engine-init effect requires canvasRef. */}
      <canvas
        ref={canvasRef}
        style={{
          width: '100%',
          height: '100%',
          outline: 'none',
          opacity: showModel ? 1 : 0,
          transition: 'opacity 0.5s ease',
        }}
      />
      {!modelPath ? (
        <div className="model-viewer-placeholder">
          <h3>3D модель не выбрана</h3>
          <p>Выберите модель в панели «Зоны мониторинга», чтобы начать просмотр</p>
          <p>Если список пуст, добавьте файлы в каталог assets/big-models или проверьте API</p>
        </div>
      ) : (
        <>
          {isLoading && (
            <div className="model-viewer-loading">
              <LoadingSpinner />
              <span>{Math.round(loadingProgress)}%</span>
            </div>
          )}
          {!modelReady && !isLoading && (
            <div className="model-viewer-empty">
              <h3>3D модель не загружена</h3>
              <p>Модель не готова к отображению</p>
            </div>
          )}
        </>
      )}
      {renderOverlay
        ? renderOverlay({ anchor: overlayPos, info: overlayData })
        : (overlayData && overlayPos && (
            <div
              style={{
                position: 'absolute',
                left: overlayPos.left,
                top: overlayPos.top,
                pointerEvents: 'none',
              }}
            >
              <div>{overlayData.name || 'Sensor'}</div>
              {overlayData.sensorId && <div>ID: {overlayData.sensorId}</div>}
            </div>
          ))}
    </div>
  )
}

export default ModelViewer