We are going to build a 3D tunnel-monitoring visualization with three.js. The project will cover lighting, fan operation, switching the lane indicator lights, variable message signs, fire alarms, fire-safety equipment, vehicle cross passages, wind gauges, control of the tunnel's emergency exits, incident simulation, and more. Let's first look at the initial result. Since the author is learning while building, the visuals are still a little rough, so please bear with them; the three.js fundamentals are essentially all covered, which makes this a useful reference for getting started.
We will use a basic three.js template to give an overview of the core building blocks.
import React, { useEffect } from 'react';
import * as THREE from 'three';
// eslint-disable-next-line import/extensions
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js';
export default function ThreeVisual() {
// Scene
let scene;
// Camera
let camera;
// Controls
let controls;
// Mesh
let mesh;
// Renderer
let renderer;
// Debug properties
const debugObject = {
light: {
amlight: {
color: 0xffffff,
},
directionalLight: {
color: 0xffffff,
position: {
x: 0,
y: 400,
z: 1800,
},
},
pointLight: {
color: 0xff0000,
position: {
x: 0,
y: 400,
z: 1800,
},
},
},
};
const sizes = {
width: window.innerWidth,
height: window.innerHeight,
};
useEffect(() => {
// eslint-disable-next-line no-use-before-define
threeStart();
}, []);
const initThree = () => {
const width = document.getElementById('threeMain').clientWidth;
const height = document.getElementById('threeMain').clientHeight;
renderer = new THREE.WebGLRenderer({
antialias: true,
logarithmicDepthBuffer: true,
});
renderer.shadowMap.enabled = true;
renderer.setSize(width, height);
document.getElementById('threeMain').appendChild(renderer.domElement);
};
const initCamera = (width, height) => {
camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 10000);
camera.position.x = 0;
camera.position.y = 500;
camera.position.z = 1300;
camera.up.x = 0;
camera.up.y = 1;
camera.up.z = 0;
camera.lookAt({
x: 0,
y: 0,
z: 0,
});
// Camera frustum helper object (optional)
// const cameraPerspectiveHelper = new THREE.CameraHelper(camera);
// scene.add(cameraPerspectiveHelper);
};
const initScene = () => {
scene = new THREE.Scene();
scene.background = new THREE.Color(0xbfd1e5);
};
const initLight = () => {
// Ambient light
const amlight = new THREE.AmbientLight(debugObject.light.amlight.color);
amlight.position.set(1000, 1000, 1000);
scene.add(amlight);
};
const initObject = () => {
const geometry = new THREE.BoxGeometry(3000, 6, 2400);
const material = new THREE.MeshBasicMaterial({color: 0xcccccc});
mesh = new THREE.Mesh(geometry, [material, material, material, material, material, material]);
mesh.position.set(0, 0, 0); // position lives on the mesh (Object3D), not on the geometry
mesh.receiveShadow = true; // receive shadows cast by other objects (castShadow would make the box cast them)
scene.add(mesh);
}
const initControl = () => {
// Attach the controls to the container; this is essentially grabbing the canvas element
const pcanvas = document.getElementById('threeMain');
controls = new OrbitControls(camera, pcanvas);
// If you drive rendering from an animate() loop, remove this listener
// controls.addEventListener( 'change', render );
// Enable damping (inertia) in the animation loop
controls.enableDamping = true;
// Damping factor, i.e. how sensitive drag-rotation feels
// controls.dampingFactor = 0.25;
// Allow zooming
controls.enableZoom = true;
// Auto-rotate
// controls.autoRotate = true;
controls.autoRotateSpeed = 0.5;
// Minimum distance from the camera to the target
// controls.minDistance = 10;
// Maximum distance from the camera to the target
controls.maxDistance = 10000;
// Allow panning with the right mouse button
controls.enablePan = true;
};
function animation() {
renderer.render(scene, camera);
// mesh.rotateY(0.01);
requestAnimationFrame(animation);
}
function initHelper() {
const axesHelper = new THREE.AxesHelper(3000);
scene.add(axesHelper);
}
function threeStart() {
initThree();
initScene();
initCamera(sizes.width, sizes.height);
initHelper();
initObject();
initLight();
initControl();
animation();
}
return <div id="threeMain" style={{ width: '100vw', height: '100vh' }} />;
}
The Scene is a 3D space, the counterpart of the body element in HTML: it is the container for every node, like an empty room that holds all the furniture. So we declare a global variable scene.
It can be initialized like this:
const initScene = () => {
scene = new THREE.Scene();
scene.background = new THREE.Color(0xbfd1e5);
};
The Camera. To use an analogy: you take an expensive camera out to shoot landscapes and you want to capture the best view, so you have to set the right position, angle and focal length; whatever the camera sees is exactly what ends up on screen. In this example we use the PerspectiveCamera, which works much like the human eye.
Another commonly used camera is the orthographic camera, OrthographicCamera, in which the apparent size of objects is not affected by their distance.
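For reference, a minimal OrthographicCamera setup might look like the sketch below (the frustum bounds and positions here are illustrative, not values from this project):
// Orthographic camera: apparent object size does not change with distance.
// The six arguments describe the viewing box: left, right, top, bottom, near, far.
const orthoCamera = new THREE.OrthographicCamera(
sizes.width / -2, // left
sizes.width / 2, // right
sizes.height / 2, // top
sizes.height / -2, // bottom
0.1, // near plane
10000 // far plane
);
orthoCamera.position.set(0, 500, 1300);
orthoCamera.lookAt(0, 0, 0);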
We also declare a global variable camera,
and initialize it like this:
const initCamera = (width, height) => {
camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 10000);
camera.position.x = 0;
camera.position.y = 500;
camera.position.z = 1300;
camera.up.x = 0;
camera.up.y = 1;
camera.up.z = 0;
camera.lookAt({
x: 0,
y: 0,
z: 0,
});
// Camera frustum helper object (optional)
// const cameraPerspectiveHelper = new THREE.CameraHelper(camera);
// scene.add(cameraPerspectiveHelper);
};
The Mesh. Before introducing it we should know about the point model Points and the line model Line: points, lines and faces, where the face-based model is the Mesh. Points, Line and Mesh are all built from a geometry (Geometry) plus a material (Material). We won't dig further into points and lines here (a quick sketch of both follows below); the key idea is that a mesh is an object dressed in a material, so do give your models some clothes. If you want a nicer outfit for them, see my earlier article on material textures [1].
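As a quick aside, here is a minimal sketch of a Points object and a Line, both built the same way as a Mesh, from a geometry plus a material (the coordinates and colors are illustrative):
// A few shared vertices
const lineGeometry = new THREE.BufferGeometry().setFromPoints([
new THREE.Vector3(0, 0, 0),
new THREE.Vector3(100, 100, 0),
new THREE.Vector3(200, 0, 0),
]);
// Points: every vertex is drawn as a dot
const points = new THREE.Points(lineGeometry, new THREE.PointsMaterial({ color: 0xff0000, size: 10 }));
// Line: the vertices are connected in order
const line = new THREE.Line(lineGeometry, new THREE.LineBasicMaterial({ color: 0x00ff00 }));
scene.add(points);
scene.add(line);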
Likewise we declare a global variable mesh,
and initialize it like this:
const geometry = new THREE.BoxGeometry(3000, 6, 2400);
const material = new THREE.MeshBasicMaterial({color: 0xcccccc});
mesh = new THREE.Mesh(geometry, [material, material, material, material, material, material]);
mesh.position.set(0, 0, 0); // position lives on the mesh (Object3D), not on the geometry
mesh.receiveShadow = true; // receive shadows cast by other objects (castShadow would make the box cast them)
scene.add(mesh);
Lights. Without light the world is dark, and the camera would see nothing, so we need to add lighting to the scene. We start with the most basic one, the ambient light AmbientLight, which illuminates everything with the same intensity from every direction and position. Since a light does not need to be reused elsewhere, there is no need for a global variable, and we can initialize it like this:
const initLight = () => {
// Ambient light
const amlight = new THREE.AmbientLight(debugObject.light.amlight.color);
amlight.position.set(1000, 1000, 1000);
scene.add(amlight);
};
The Renderer. In real life, once you carry your camera to a beautiful spot you need a photograph to hold on to the view; in three.js, if you want that photograph you need another object, the WebGL renderer WebGLRenderer, which captures it all.
Likewise we declare a global variable renderer and initialize it like this:
renderer = new THREE.WebGLRenderer({
... // renderer options
});
A few more notes on the renderer, mainly how it connects to our DOM:
Through its domElement property, WebGLRenderer exposes the canvas that render() draws into; domElement is simply an HTML element, a canvas. Its size can be set with setSize().
First, define an HTML element:
return <div id="threeMain" style={{ width: '100vw', height: '100vh' }} />;
To connect this element to the renderer, we append a child node (the canvas) to the div:
const initThree = () => {
const width = document.getElementById('threeMain').clientWidth;
const height = document.getElementById('threeMain').clientHeight;
renderer = new THREE.WebGLRenderer({
... // renderer options
});
renderer.setSize(width, height); // set the canvas size
document.getElementById('threeMain').appendChild(renderer.domElement); // append the canvas to the DOM
};
With the renderer connected to our three.js objects, its render() method records the scene through the camera:
renderer.render(scene, camera);
Controls let us drive the scene with the keyboard and mouse, making it interactive. There are many kinds of controls, but here we only cover the orbit controls OrbitControls, which make the camera orbit a target, much like the Earth orbiting the Sun.
Likewise we declare a global variable controls and initialize it like this:
controls = new OrbitControls(camera, pcanvas);
Wiring it up, with the main properties:
const initControl = () => {
// Attach the controls to the container; this is essentially grabbing the canvas element
const pcanvas = document.getElementById('threeMain');
controls = new OrbitControls(camera, pcanvas);
// If you drive rendering from an animate() loop, remove this listener
// controls.addEventListener( 'change', render );
// Enable damping (inertia) in the animation loop
controls.enableDamping = true;
// Damping factor, i.e. how sensitive drag-rotation feels
// controls.dampingFactor = 0.25;
// Allow zooming
controls.enableZoom = true;
// Auto-rotate
// controls.autoRotate = true;
controls.autoRotateSpeed = 0.5;
// Minimum distance from the camera to the target
// controls.minDistance = 10;
// Maximum distance from the camera to the target
controls.maxDistance = 10000;
// Allow panning with the right mouse button
controls.enablePan = true;
};
That covers the basic three.js building blocks. A few easily missed details are still worth adding.
Animation and per-frame updates: since damping is enabled on the controls, controls.update() must be called in the render loop:
function animation() {
controls.update()
renderer.render(scene, camera);
// mesh.rotateY(0.01);
requestAnimationFrame(animation);
}
One more piece of background:
requestAnimationFrame schedules the callback to run right before the browser's next repaint (typically about 60 times per second) and is paused automatically when the tab is hidden, which makes it the standard way to drive a render loop.
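If the motion should not depend on the actual frame rate, the timestamp that requestAnimationFrame passes to the callback can be used to scale each step; a small sketch (the 0.5 rad/s speed is illustrative):
let lastTime = 0;
function animation(time = 0) {
const delta = (time - lastTime) / 1000; // seconds since the previous frame
lastTime = time;
mesh.rotateY(0.5 * delta); // rotate at 0.5 rad/s regardless of frame rate
controls.update();
renderer.render(scene, camera);
requestAnimationFrame(animation);
}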
As shown in the figure, we first build the road surface.
This part is mainly about applying a texture to a plane (plane); the details are annotated at the relevant places in the code.
// Texture loader
const loader = new THREE.TextureLoader();
// Load the texture
const texture = loader.load('/model/route.png', function(t) {
// eslint-disable-next-line no-param-reassign,no-multi-assign
t.wrapS = t.wrapT = THREE.RepeatWrapping; // repeat the texture, much like background-repeat in CSS
t.repeat.set(1, 1);
});
// Plane
const geometryRoute = new THREE.PlaneGeometry(1024, 2400);
const materialRoute = new THREE.MeshStandardMaterial({
map: texture, // use the texture map
side: THREE.BackSide, // render the back face
});
const plane = new THREE.Mesh(geometryRoute, materialRoute);
plane.receiveShadow = true;
plane.position.set(0, 8, 0);
plane.rotateX(Math.PI / 2);
scene.add(plane);
Now we build the tunnel wall.
This part is about importing an OBJ model and applying a texture to it (the material here is an .mtl file).
Background:
An .mtl file (Material Library File) is a material library describing an object's materials. It is stored as ASCII, so any text editor can open and edit it. Let's open ours and see what it contains.
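As an illustration only, a minimal .mtl might look roughly like this (a made-up example; your exported file will contain different names and values):
newmtl wallMaterial
Ka 0.200000 0.200000 0.200000
Kd 0.800000 0.800000 0.800000
Ks 0.000000 0.000000 0.000000
d 1.000000
map_Kd suidao.jpg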
From the .obj file we can see that it needs the tunnelWall.mtl material library, and from the .mtl file we can see that it needs the image suidao.jpg (which must sit in the same directory as the model). So we are essentially back to the model-plus-texture routine from the road section.
There are still a few differences, though. First, the loaders are different:
const mtlLoader = new MTLLoader();
const loader = new OBJLoader(); // created in the init step, used to import the model
Second, the model is something you built yourself in a modelling tool, so there is a fair chance it will not show up the first time you import it. Print the loaded object and analyse it to find the cause (see the inspection sketch after the loading code below).
// Shared registry of model objects
const modelsObj = {
tunnelWall: {
mtl: '/model/tunnelWall.mtl',
obj: '/model/tunnelWall.obj',
mesh: null,
},
camera: {
mtl: '/model/camera/摄像头方.mtl',
obj: '/model/camera/摄像头方.obj',
mesh: null,
},
};
mtlLoader.load(modelsObj.tunnelWall.mtl, material => {
material.preload();
// Hand the materials parsed from the .mtl file to the OBJ loader
loader.setMaterials(material);
loader.load(modelsObj.tunnelWall.obj, object => {
// Compute the bounding box and recenter the geometry
object.children[0].geometry.computeBoundingBox();
object.children[0].geometry.center();
modelsObj.tunnelWall.mesh = object;
scene.add(object);
});
});
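If the model does not show up, a quick way to find out why is to log what was actually loaded and walk through its children (a sketch; the names and structure depend entirely on your own model):
loader.load(modelsObj.tunnelWall.obj, object => {
console.log(object); // the Group returned by OBJLoader
object.traverse(child => {
if (child.isMesh) {
// log each sub-mesh with its vertex count and material(s)
console.log(child.name, child.geometry.attributes.position.count, child.material);
}
});
});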
Now let's add the cameras.
This is much the same as the tunnel wall, except that we need several instances rather than one, which brings in the concepts of a group (Group) and cloning (clone).
Background: we clone the loaded camera mesh several times and collect the clones in a Group, so they can be added to the scene and managed together.
// Load the camera model
const loadCameraModel = () => {
const mtlLoader = new MTLLoader();
const loader = new OBJLoader(); // created in the init step, used to import the model
mtlLoader.load(modelsObj.camera.mtl, material => {
material.preload();
// Hand the materials parsed from the .mtl file to the OBJ loader
loader.setMaterials(material);
loader.load(modelsObj.camera.obj, object => {
console.log(object);
// Compute the bounding box and recenter the geometry
object.children[0].geometry.computeBoundingBox();
object.children[0].geometry.center();
modelsObj.camera.mesh = object;
cloneCameraModel(4, 60, 180);
cloneCameraModel(4, -200, 180);
});
});
};
// Clone the camera model
const cloneCameraModel = (cameraSize, lrInterval, baInterval) => {
const group = new THREE.Group();
for (let i = 0; i <= cameraSize; i += 1) {
modelsObj[`camera${i}`] = modelsObj.camera.mesh.clone();
modelsObj[`camera${i}`].position.set(lrInterval, 180, baInterval * (i % 2 === 0 ? -i : i));
modelsObj[`camera${i}`].scale.set(1, 1, 1);
group.add(modelsObj[`camera${i}`])
}
scene.add(group);
};
This part involves click interaction (Raycaster), the glow effect (the effect composer, used for shader-based rendering), and debug mode (GUI).
// Mouse coordinates, used to handle clicks on models
const mouse = new THREE.Vector2(); // a 2D vector storing the mouse position in normalized device coordinates
const raycaster = new THREE.Raycaster(); // the raycaster used for picking
Background:
Raycasting (Raycaster) casts a ray in a given direction and tests which objects it intersects; it is the step that turns a mouse click into world coordinates, i.e. the class that converts a 2D coordinate into a 3D one. For the underlying math, see this article on the principle and derivation [2].
We listen for click events on the canvas:
const pcanvas = document.getElementById('threeMain');
// Listen for clicks on pcanvas
pcanvas.addEventListener('click', e => onmodelclick(e)); // click handler
In the click handler we convert the click position from screen coordinates to normalized device coordinates, cast the ray, and store the hit model in the global variable clickModel:
const onmodelclick = event => {
console.log(event);
// Convert the click position to normalized device coordinates (-1 to 1)
mouse.x = (event.clientX / sizes.width) * 2 - 1;
mouse.y = -(event.clientY / sizes.height) * 2 + 1;
console.log(mouse);
raycaster.setFromCamera(mouse, camera);
const intersects = raycaster.intersectObjects(scene.children); // models hit by the ray, sorted from nearest to farthest
// const worldPosition = new THREE.Vector3(); // a 3D vector to record the model's world position
if (intersects.length > 0) {
clickModel = intersects[0].object;
outlinePass.selectedObjects = [];
outlinePass.selectedObjects = [clickModel];
}
};
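Note that the conversion above divides by the window size, which works here because the canvas fills the whole viewport; if the canvas were embedded somewhere in a page, computing the coordinates against its bounding rectangle is more robust (a sketch):
const rect = renderer.domElement.getBoundingClientRect();
mouse.x = ((event.clientX - rect.left) / rect.width) * 2 - 1;
mouse.y = -((event.clientY - rect.top) / rect.height) * 2 + 1;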
three.js provides an add-on, EffectComposer.js, with which we can apply post-processing effects. Post-processing is like Photoshop for the rendered image: it takes the three.js render result and processes it further, for example to add a glow. Combining it with the highlighted outline pass gives the glow effect shown in the figure below.
import { OutlinePass } from 'three/examples/jsm/postprocessing/OutlinePass';
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer';
import { ShaderPass } from 'three/examples/jsm/postprocessing/ShaderPass';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass';
import { FXAAShader } from 'three/examples/jsm/shaders/FXAAShader';
import { OutputPass } from 'three/examples/jsm/postprocessing/OutputPass';
let composer;
let effectFXAA;
let outlinePass;
const onmodelclick = event => {
...
if (intersects.length > 0) {
outlinePass.selectedObjects = [];
outlinePass.selectedObjects = [clickModel];
}
};
// Effect composer, used for shader-based post-processing
const initEffectComposer = () => {
// Work around mesh flickering (z-fighting) when displaying the model
// const parameters = { format: THREE.RGBAFormat };
// const size = renderer.getDrawingBufferSize(new THREE.Vector2());
// const renderTarget = new THREE.WebGLMultipleRenderTargets(size.width, size.height, parameters);
composer = new EffectComposer(renderer);
const renderPass = new RenderPass(scene, camera);
composer.addPass(renderPass);
outlinePass = new OutlinePass(new THREE.Vector2(sizes.width, sizes.height), scene, camera);
outlinePass.visibleEdgeColor.set(0xffff00); // outline color (Color.set expects a hex value or 0..1 components, not 0..255)
outlinePass.edgeStrength = 1.0; // edge brightness
outlinePass.edgeGlow = 1; // glow, in [0, 1]
outlinePass.usePatternTexture = false; // whether to overlay a pattern texture on the outline
outlinePass.edgeThickness = 1.0; // edge width
outlinePass.downSampleRatio = 1; // downsampling ratio of the render target
composer.addPass(outlinePass);
const outputPass = new OutputPass();
composer.addPass(outputPass);
effectFXAA = new ShaderPass(FXAAShader);
effectFXAA.uniforms.resolution.value.set(1 / sizes.width, 1 / sizes.height);
composer.addPass(effectFXAA);
};
function animation() {
stats.update();
renderer.render(scene, camera);
composer.render();
// mesh.rotateY(0.01);
requestAnimationFrame(animation);
}
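One detail worth noting: the composer's RenderPass already draws the scene, so calling renderer.render and composer.render in the same frame renders everything twice. Rendering only through the composer is usually enough, roughly like this (a sketch based on the code above):
function animation() {
stats.update();
controls.update();
composer.render(); // the RenderPass inside the composer renders scene + camera
requestAnimationFrame(animation);
}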
If you still have questions about this part, you can refer to this article [3].
The debug GUI: let's take adding a point light as the example.
// Debug GUI
let gui;
function initDebugger() {
gui = new GUI();
}
// Debug properties
const debugObject = {
light: {
pointLight: {
color: 0xff0000,
position: {
x: 0,
y: 400,
z: 1800,
},
},
},
};
// Point light
const pointLight = new THREE.PointLight(debuggerPointLight.color, 1);
pointLight.castShadow = true;
pointLight.position.set(100, 100, 300);
scene.add(pointLight);
const pointLightFolder = lightFolder.addFolder('Point light');
pointLightFolder.addColor(debuggerPointLight, 'color').onChange(function(value) {
pointLight.color.set(value);
});
// Point light position
pointLightFolder.add(debuggerPointLight.position, 'x', -1000, 1000).onChange(function(value) {
pointLight.position.x = value;
pointLightHelper.update();
});
pointLightFolder.add(debuggerPointLight.position, 'y', -1000, 1000).onChange(function(value) {
pointLight.position.y = value;
pointLightHelper.update();
});
pointLightFolder.add(debuggerPointLight.position, 'z', -1000, 1000).onChange(function(value) {
pointLight.position.z = value;
pointLightHelper.update();
});
The result is shown in the figure. For the light to actually cast shadows, we also enable the renderer's shadow map and set castShadow / receiveShadow on the light and the meshes:
renderer = new THREE.WebGLRenderer({
...
});
renderer.shadowMap.enabled = true;
const pointLight = new THREE.PointLight(debuggerPointLight.color, 1);
pointLight.castShadow = true;
plane.receiveShadow = true;
loader.load(modelsObj.tunnelWall.obj, object => {
object.traverse(obj => {
if (obj.castShadow !== undefined) {
// cast shadows onto other objects
// eslint-disable-next-line no-param-reassign
obj.castShadow = true;
// receive shadows cast by other objects
// eslint-disable-next-line no-param-reassign
obj.receiveShadow = true;
}
});
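Shadow quality and reach can then be tuned on the light's shadow map; the values below are illustrative, not taken from the project:
pointLight.shadow.mapSize.width = 2048; // higher-resolution shadow map
pointLight.shadow.mapSize.height = 2048;
pointLight.shadow.camera.near = 1; // range covered by the shadow camera
pointLight.shadow.camera.far = 3000;
renderer.shadowMap.type = THREE.PCFSoftShadowMap; // softer shadow edges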
Stats is a small utility that displays the render frame rate and frame time; it is worth mentioning here.
import Stats from 'three/examples/jsm/libs/stats.module';
// Performance monitor
let stats;
function initStats() {
stats = new Stats();
stats.showPanel(1); // 0: fps, 1: ms, 2: mb, 3+: custom
}
document.getElementById('threeMain').appendChild(stats.domElement); // add the stats panel to the DOM (after initStats has run)
function animation() {
stats.update();
renderer.render(scene, camera);
composer.render();
// mesh.rotateY(0.01);
requestAnimationFrame(animation);
}
This is only a small first step toward the goal, the tip of the iceberg; there is a long road ahead, with plenty of documentation to read and plenty of trial and error. If you are interested in what comes next, do follow along. Thanks! The complete code so far:
import React, { useEffect } from 'react';
import * as THREE from 'three';
// eslint-disable-next-line import/extensions
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js';
// eslint-disable-next-line import/extensions
import { OBJLoader } from 'three/examples/jsm/loaders/OBJLoader.js';
import { MTLLoader } from 'three/examples/jsm/loaders/MTLLoader';
import Stats from 'three/examples/jsm/libs/stats.module';
// eslint-disable-next-line import/extensions
import { GUI } from 'three/examples/jsm/libs/lil-gui.module.min.js';
import { OutlinePass } from 'three/examples/jsm/postprocessing/OutlinePass';
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer';
import { ShaderPass } from 'three/examples/jsm/postprocessing/ShaderPass';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass';
import { FXAAShader } from 'three/examples/jsm/shaders/FXAAShader';
import { OutputPass } from 'three/examples/jsm/postprocessing/OutputPass';
export default function ThreeVisual() {
// Scene
let scene;
// Camera
let camera;
// Controls
let controls;
// Mesh
let mesh;
// Renderer
let renderer;
// Performance monitor
let stats;
// Debug GUI
let gui;
// Currently clicked model
let clickModel;
// Post-processing objects used for the click highlight
let composer;
let effectFXAA;
let outlinePass;
// Debug properties
const debugObject = {
light: {
amlight: {
color: 0xffffff,
},
directionalLight: {
color: 0xffffff,
position: {
x: 0,
y: 400,
z: 1800,
},
},
pointLight: {
color: 0xff0000,
position: {
x: 0,
y: 400,
z: 1800,
},
},
},
model: {
wall: {
position: {
x: 0,
y: 210,
z: 0,
},
scale: 0.12,
opacity: {
wallTopOpa: 0.4,
wallSideOpa: 1,
},
},
camera: {
position: {
x: 100,
y: 100,
z: 100,
},
scale: 1,
},
},
};
// Model registry
const modelsObj = {
tunnelWall: {
mtl: '/model/tunnelWall.mtl',
obj: '/model/tunnelWall.obj',
mesh: null,
},
camera: {
mtl: '/model/camera/摄像头方.mtl',
obj: '/model/camera/摄像头方.obj',
mesh: null,
},
};
const sizes = {
width: window.innerWidth,
height: window.innerHeight,
};
// Mouse coordinates, used to handle clicks on models
const mouse = new THREE.Vector2(); // a 2D vector storing the mouse position in normalized device coordinates
const raycaster = new THREE.Raycaster(); // the raycaster used for picking
useEffect(() => {
// eslint-disable-next-line no-use-before-define
threeStart();
}, []);
const initThree = () => {
const width = document.getElementById('threeMain').clientWidth;
const height = document.getElementById('threeMain').clientHeight;
renderer = new THREE.WebGLRenderer({
antialias: true,
logarithmicDepthBuffer: true,
});
renderer.shadowMap.enabled = true;
renderer.setSize(width, height);
document.getElementById('threeMain').appendChild(renderer.domElement);
// renderer.setClearColor(0xFFFFFF, 1.0);
document.getElementById('threeMain').appendChild(stats.domElement);
};
const initCamera = (width, height) => {
camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 10000);
camera.position.x = 0;
camera.position.y = 500;
camera.position.z = 1300;
camera.up.x = 0;
camera.up.y = 1;
camera.up.z = 0;
camera.lookAt({
x: 0,
y: 0,
z: 0,
});
// Camera frustum helper object (optional)
// const cameraPerspectiveHelper = new THREE.CameraHelper(camera);
// scene.add(cameraPerspectiveHelper);
};
const initScene = () => {
scene = new THREE.Scene();
scene.background = new THREE.Color(0xbfd1e5);
};
const initLight = () => {
const lightFolder = gui.addFolder('Light');
const {
directionalLight: debuggerDirectionalLight,
pointLight: debuggerPointLight,
} = debugObject.light;
// Ambient light
// const amlight = new THREE.AmbientLight(debugObject.light.amlight.color);
// amlight.position.set(1000, 1000, 1000);
// scene.add(amlight);
// // Ambient light debug controls
// const amlightFolder = lightFolder.addFolder('Ambient light')
// amlightFolder.addColor(debugObject.light.amlight, 'color').onChange(function(value){
// amlight.color.set(value);
// });
// Directional light
// Create a directional light, white, intensity 1
const directionalLight = new THREE.DirectionalLight(debuggerDirectionalLight.color, 1);
// Position the directional light
directionalLight.position.set(0, 400, 1000);
directionalLight.castShadow = true;
const directonalLightHelper = new THREE.DirectionalLightHelper(directionalLight, 20);
// scene.add(directonalLightHelper);
scene.add(directionalLight);
// Directional light debug controls
const directionalLightFolder = lightFolder.addFolder('Directional light');
directionalLightFolder.addColor(debuggerDirectionalLight, 'color').onChange(function(value) {
directionalLight.color.set(value);
});
// Directional light position
directionalLightFolder
.add(debuggerDirectionalLight.position, 'x', -1000, 1000)
.onChange(function(value) {
directionalLight.position.x = value;
directonalLightHelper.update();
});
directionalLightFolder
.add(debuggerDirectionalLight.position, 'y', -1000, 1000)
.onChange(function(value) {
directionalLight.position.y = value;
directonalLightHelper.update();
});
directionalLightFolder
.add(debuggerDirectionalLight.position, 'z', -1000, 1000)
.onChange(function(value) {
directionalLight.position.z = value;
directonalLightHelper.update();
});
// Point light
const pointLight = new THREE.PointLight(debuggerPointLight.color, 1);
pointLight.castShadow = true;
pointLight.position.set(100, 100, 300);
const sphereSize = 10;
const pointLightHelper = new THREE.PointLightHelper(pointLight, sphereSize);
scene.add(pointLight);
scene.add(pointLightHelper);
const pointLightFolder = lightFolder.addFolder('Point light');
pointLightFolder.addColor(debuggerPointLight, 'color').onChange(function(value) {
pointLight.color.set(value);
});
// Point light position
pointLightFolder.add(debuggerPointLight.position, 'x', -1000, 1000).onChange(function(value) {
pointLight.position.x = value;
pointLightHelper.update();
});
pointLightFolder.add(debuggerPointLight.position, 'y', -1000, 1000).onChange(function(value) {
pointLight.position.y = value;
pointLightHelper.update();
});
pointLightFolder.add(debuggerPointLight.position, 'z', -1000, 1000).onChange(function(value) {
pointLight.position.z = value;
pointLightHelper.update();
});
};
const initObject = () => {
const geometry = new THREE.BoxGeometry(3000, 6, 2400);
const loader = new THREE.TextureLoader();
const texture = loader.load('/model/route.png', function(t) {
// eslint-disable-next-line no-param-reassign,no-multi-assign
t.wrapS = t.wrapT = THREE.RepeatWrapping;
t.repeat.set(1, 1);
});
const material = new THREE.MeshBasicMaterial({ color: 0xcccccc });
mesh = new THREE.Mesh(geometry, [material, material, material, material, material, material]);
mesh.position.set(0, 0, 0); // position lives on the mesh (Object3D), not on the geometry
mesh.receiveShadow = true; // receive shadows cast by other objects
scene.add(mesh);
// Plane
const geometryRoute = new THREE.PlaneGeometry(1024, 2400);
const materialRoute = new THREE.MeshStandardMaterial({
map: texture, // use the texture map
side: THREE.BackSide, // render the back face
});
const plane = new THREE.Mesh(geometryRoute, materialRoute);
plane.receiveShadow = true;
plane.position.set(0, 8, 0);
plane.rotateX(Math.PI / 2);
scene.add(plane);
};
const initControl = () => {
// Attach the controls to the container; this is essentially grabbing the canvas element
const pcanvas = document.getElementById('threeMain');
controls = new OrbitControls(camera, pcanvas);
// If you drive rendering from an animate() loop, remove this listener
// controls.addEventListener( 'change', render );
// Enable damping (inertia) in the animation loop
controls.enableDamping = true;
// Damping factor, i.e. how sensitive drag-rotation feels
// controls.dampingFactor = 0.25;
// Allow zooming
controls.enableZoom = true;
// Auto-rotate
// controls.autoRotate = true;
controls.autoRotateSpeed = 0.5;
// Minimum distance from the camera to the target
// controls.minDistance = 10;
// Maximum distance from the camera to the target
controls.maxDistance = 10000;
// Allow panning with the right mouse button
controls.enablePan = true;
};
const onmodelclick = event => {
console.log(event);
// Convert the click position to normalized device coordinates (-1 to 1)
mouse.x = (event.clientX / sizes.width) * 2 - 1;
mouse.y = -(event.clientY / sizes.height) * 2 + 1;
console.log(mouse);
raycaster.setFromCamera(mouse, camera);
const intersects = raycaster.intersectObjects(scene.children); // models hit by the ray, sorted from nearest to farthest
// const worldPosition = new THREE.Vector3(); // a 3D vector to record the model's world position
if (intersects.length > 0) {
clickModel = intersects[0].object;
outlinePass.selectedObjects = [];
outlinePass.selectedObjects = [clickModel];
// intersects[0].object.getWorldPosition(worldPosition); // record the clicked model's world position in worldPosition
// const texture = new THREE.TextureLoader().load("/model/route.png");
// const spriteMaterial = new THREE.SpriteMaterial({
// map: texture, // the sprite's texture map
// });
// const sprite = new THREE.Sprite(spriteMaterial); // a sprite always faces the camera, whatever the viewing angle
// scene.add(sprite);
// sprite.scale.set(40,40,40);
// sprite.position.set(worldPosition.x, worldPosition.y + 8, worldPosition.z); // place the sprite slightly above the clicked model using the world position captured above
}
};
const initEvent = () => {
window.addEventListener('resize', () => {
// Update sizes
sizes.width = window.innerWidth;
sizes.height = window.innerHeight;
// Update camera
camera.aspect = sizes.width / sizes.height;
camera.updateProjectionMatrix();
// Update renderer
renderer.setSize(sizes.width, sizes.height);
composer.setSize(sizes.width, sizes.height);
renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
effectFXAA.uniforms.resolution.value.set(1 / sizes.width, 1 / sizes.height);
});
const pcanvas = document.getElementById('threeMain');
// Listen for click events
pcanvas.addEventListener('click', e => onmodelclick(e)); // click handler
};
const loadModel = () => {
const mtlLoader = new MTLLoader();
const loader = new OBJLoader(); // created in the init step, used to import the model
mtlLoader.load(modelsObj.tunnelWall.mtl, material => {
material.preload();
// Hand the materials parsed from the .mtl file to the OBJ loader
loader.setMaterials(material);
loader.load(modelsObj.tunnelWall.obj, object => {
object.traverse(obj => {
if (obj.castShadow !== undefined) {
// cast shadows onto other objects
// eslint-disable-next-line no-param-reassign
obj.castShadow = true;
// receive shadows cast by other objects
// eslint-disable-next-line no-param-reassign
obj.receiveShadow = true;
}
});
// Compute the bounding box and recenter the geometry
object.children[0].geometry.computeBoundingBox();
object.children[0].geometry.center();
// Debug values for the model
const { scale, position, opacity } = debugObject.model.wall;
// The model's own transform and material references
const {
scale: changeScale,
position: changePositon,
material: changeMaterial,
} = object.children[0];
changeScale.set(scale, scale, scale);
changePositon.set(position.x, position.y, position.z);
changeMaterial[0].transparent = true;
changeMaterial[0].opacity = opacity.wallTopOpa;
changeMaterial[1].transparent = true;
changeMaterial[1].opacity = opacity.wallSideOpa;
modelsObj.tunnelWall.mesh = object;
scene.add(object);
// Model debug controls
const modelFolder = gui.addFolder('Model');
const wallFolder = modelFolder.addFolder('Wall');
wallFolder
.add(position, 'x', -100, 300)
.step(0.5)
.onChange(function(value) {
changePositon.x = value;
});
wallFolder
.add(position, 'y', -100, 300)
.step(0.5)
.onChange(function(value) {
changePositon.y = value;
});
wallFolder
.add(position, 'z', -100, 300)
.step(0.5)
.onChange(function(value) {
changePositon.z = value;
});
wallFolder
.add(debugObject.model.wall, 'scale', 0.01, 0.3)
.step(0.001)
.onChange(function(value) {
changeScale.set(value, value, value);
});
wallFolder
.add(opacity, 'wallTopOpa', 0, 1)
.step(0.01)
.onChange(function(value) {
changeMaterial[0].opacity = value;
});
wallFolder
.add(opacity, 'wallSideOpa', 0, 1)
.step(0.01)
.onChange(function(value) {
changeMaterial[1].opacity = value;
});
});
});
};
// Clone the camera model
const cloneCameraModel = (cameraSize, lrInterval, baInterval) => {
const group = new THREE.Group();
for (let i = 0; i <= cameraSize; i += 1) {
modelsObj[`camera${i}`] = modelsObj.camera.mesh.clone();
modelsObj[`camera${i}`].position.set(lrInterval, 180, baInterval * (i % 2 === 0 ? -i : i));
modelsObj[`camera${i}`].scale.set(1, 1, 1);
group.add(modelsObj[`camera${i}`])
}
scene.add(group);
};
// Load the camera model
const loadCameraModel = () => {
const mtlLoader = new MTLLoader();
const loader = new OBJLoader(); // created in the init step, used to import the model
mtlLoader.load(modelsObj.camera.mtl, material => {
material.preload();
// Hand the materials parsed from the .mtl file to the OBJ loader
loader.setMaterials(material);
loader.load(modelsObj.camera.obj, object => {
object.traverse(obj => {
if (obj.castShadow !== undefined) {
// cast shadows onto other objects
// eslint-disable-next-line no-param-reassign
obj.castShadow = true;
// receive shadows cast by other objects
// eslint-disable-next-line no-param-reassign
obj.receiveShadow = true;
}
});
console.log(object);
// Compute the bounding box and recenter the geometry
object.children[0].geometry.computeBoundingBox();
object.children[0].geometry.center();
// Default model transform
object.children[0].scale.set(1, 1, 1);
object.children[0].position.set(100, 100, 100);
modelsObj.camera.mesh = object;
cloneCameraModel(4, 60, 180);
cloneCameraModel(4, -200, 180);
});
});
};
// Effect composer, used for shader-based post-processing
const initEffectComposer = () => {
// Work around mesh flickering (z-fighting) when displaying the model
// const parameters = { format: THREE.RGBAFormat };
// const size = renderer.getDrawingBufferSize(new THREE.Vector2());
// const renderTarget = new THREE.WebGLMultipleRenderTargets(size.width, size.height, parameters);
composer = new EffectComposer(renderer);
const renderPass = new RenderPass(scene, camera);
composer.addPass(renderPass);
outlinePass = new OutlinePass(new THREE.Vector2(sizes.width, sizes.height), scene, camera);
outlinePass.visibleEdgeColor.set(0xffff00); // outline color (Color.set expects a hex value or 0..1 components, not 0..255)
outlinePass.edgeStrength = 1.0; // edge brightness
outlinePass.edgeGlow = 1; // glow, in [0, 1]
outlinePass.usePatternTexture = false; // whether to overlay a pattern texture on the outline
outlinePass.edgeThickness = 1.0; // edge width
outlinePass.downSampleRatio = 1; // downsampling ratio of the render target
composer.addPass(outlinePass);
const outputPass = new OutputPass();
composer.addPass(outputPass);
effectFXAA = new ShaderPass(FXAAShader);
effectFXAA.uniforms.resolution.value.set(1 / sizes.width, 1 / sizes.height);
composer.addPass(effectFXAA);
};
function animation() {
stats.update();
renderer.render(scene, camera);
composer.render();
// mesh.rotateY(0.01);
requestAnimationFrame(animation);
}
function initHelper() {
// const axesHelper = new THREE.AxesHelper(3000);
// scene.add(axesHelper);
}
function initStats() {
stats = new Stats();
stats.showPanel(1); // 0: fps, 1: ms, 2: mb, 3+: custom
}
function initDebugger() {
gui = new GUI();
}
function threeStart() {
initEvent();
initStats();
initDebugger();
initThree();
initScene();
initCamera(sizes.width, sizes.height);
initHelper();
initLight();
initControl();
initObject();
loadModel();
loadCameraModel();
initEffectComposer();
animation();
}
return <div id="threeMain" style={{ width: '100vw', height: '100vh' }} />;
}