Autodesk Forge viewer using a material with an image texture - autodesk-forge

I need help with using a material with an image texture.
I am using the 2.9 viewer files, and I see that they use revision 71 of three.js.
I have created a custom shape, a square built from vertices.
I would like to use an image-texture material:
var shininess = 50, specular = 0x333333, bumpScale = 1, shading = THREE.SmoothShading;
var imgTexture = THREE.ImageUtils.loadTexture( "textures/UV_Grid_Sm.jpg" );
imgTexture.wrapS = imgTexture.wrapT = THREE.RepeatWrapping;
imgTexture.anisotropy = 16;
var material = new THREE.MeshLambertMaterial( { map: imgTexture, color: 0xffffff, shading: shading } );
viewer.impl.matman().addMaterial("FloorMaterial", material, true);
...
// Convert Array to Vector Array and World Space
var vectorWorldArray = convertFloatArrayToVectorWorldArray(viewer, matrixWorld, floatArray);
var shape = new THREE.Shape(vectorWorldArray);
var geometry = new THREE.ShapeGeometry(shape);
geometry.faces.push( new THREE.Face3( 0, 1, 2 ) );
geometry.faces.push( new THREE.Face3( 2, 3, 0 ) );
geometry.computeFaceNormals();
var mesh = new THREE.Mesh(geometry, material);
var scene = viewer.impl.scene;
// mesh.translateZ( 10 );
scene.add(mesh);
The texture just shows up as all black.
The only thing that comes up in Chrome (Version 52.0.2743.116 m) is:
[.Offscreen-For-WebGL-0B70AB98]RENDER WARNING: there is no texture bound to the unit 0
I know that the texture exists.
Is there an easier/correct way of adding the image texture material?

It looks like a Lambert material needs a light other than what comes with the viewer.
I switched to using MeshBasicMaterial and now my texture shows up:
var material = new THREE.MeshBasicMaterial( { map: imgTexture, color: 0xffffff, shading: shading } );
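If you would rather keep MeshLambertMaterial, here is an untested sketch of the "add your own light" idea, assuming the viewer's render pass picks up lights added to viewer.impl.scene (this may depend on the viewer version):
var light = new THREE.PointLight(0xffffff, 1);
light.position.set(0, 0, 1000);
viewer.impl.scene.add(light);   // assumption: lights in this scene feed the standard three.js lighting uniforms
viewer.impl.invalidate(true);   // force the viewer to re-render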

Related

WebGL Overlay Google map: How to cast Shadows?

How can I make the 3D objects that I load into the map using the WebGL Overlay API cast shadows on the map tiles and on other loaded objects?
It seems to me that this is not supported yet (or the feature has been removed), so is there any workaround?
Preferred WebGL framework: Three.js
A workaround with three.js would be to put a thin box (it will not work with PlaneGeometry) on the ground that is completely transparent but receives the shadow; this can be achieved with ShadowMaterial, as sketched below.
Note: this will only show shadows from meshes added to the WebGL overlay, not from the buildings on the map.
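A minimal sketch of that workaround (the sizes and opacity are only illustrative):
var shadowCatcher = new THREE.Mesh(
    new THREE.BoxGeometry(2000, 0.1, 2000),     // thin box instead of PlaneGeometry, as noted above
    new THREE.ShadowMaterial({ opacity: 0.5 })  // invisible except where shadows fall
);
shadowCatcher.receiveShadow = true;
scene.add(shadowCatcher);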
We do not have this option yet on Google Maps, because the map overlay we get from WebglOverlayView does not have a receive-shadow option.
I tried to create my own plane over the Google map to achieve the shadow.
My plane's opacity is very light, around 0.3. I also added an image below of the colorful plane to explain.
Please see #yosan_melese's answer; using ShadowMaterial is the right approach.
Here is example code:
import { Loader } from '@googlemaps/js-api-loader';
import * as THREE from 'three';
import {FBXLoader} from 'three/examples/jsm/loaders/FBXLoader.js';
const apiOptions = {
apiKey: 'A***********8',
version: "beta",
map_ids: [""]
};
let directionalLight = '';
const mapOptions = {
"tilt": 0,
"heading": 0,
"zoom": 18,
"center": { lat: 33.57404, lng: 73.1637 },
"mapId": "" ,
"mapTypeId": 'roadmap'
}
/*
roadmap: the default map that you usually see.
satellite: satellite view of Google Maps and Google Earth, when available.
hybrid: a mixture of roadmap and satellite view.
terrain: a map based on terrain information, such as mountains and valleys.
*/
async function initMap() {
const mapDiv = document.getElementById("map");
const apiLoader = new Loader(apiOptions);
await apiLoader.load();
return new google.maps.Map(mapDiv, mapOptions);
}
let scene, renderer, camera, loader,loader1,controls;
function initWebglOverlayView(map) {
const webglOverlayView = new google.maps.WebglOverlayView();
webglOverlayView.onAdd = () => {
// set up the scene
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera();
const ambientLight = new THREE.AmbientLight( 0xffffff, 0.75 ); // soft white light
scene.add(ambientLight);
directionalLight = new THREE.DirectionalLight(0xffffff, 1);
directionalLight.position.x += (-90)
directionalLight.position.y += 0
directionalLight.position.z += 20
directionalLight.castShadow = true
const d = 100;
directionalLight.shadow.camera.left = - d;
directionalLight.shadow.camera.right = d;
directionalLight.shadow.camera.top = d;
directionalLight.shadow.camera.bottom = - d;
scene.add(directionalLight);
scene.add( new THREE.CameraHelper( directionalLight.shadow.camera ) );
// FLOOR
/*
const plane = new THREE.Mesh(new THREE.PlaneGeometry(2050, 2200, 300),
new THREE.MeshPhongMaterial({ color: 0xF3F4F5, opacity: 0.3, transparent: true}));
plane.rotation.x = 0;
plane.rotation.y = 0;
plane.rotation.z = 0;
plane.castShadow = true
plane.receiveShadow = true
scene.add(plane);
*/
//after yosan_melese answer i am using ShadowMaterial
const geometry = new THREE.PlaneGeometry( 2000, 2000 );
geometry.rotateX( - Math.PI / 2 );
const material = new THREE.ShadowMaterial();
material.opacity = 0.5;
const plane = new THREE.Mesh( geometry, material );
plane.rotation.x = 2;
plane.position.x = 0;
plane.position.y = 0;
plane.position.z = 0;
plane.receiveShadow = true;
scene.add( plane );
loader = new FBXLoader();
loader.load( 'model/name_model.fbx', function ( object ) {
object.scale.set( 1, 1, 1 );
object.rotation.x = 1.480;
object.rotation.y = 0.950;
object.rotation.z = 0.070;
object.castShadow = true;
object.receiveShadow = true;
object.name = "name_model";
object.traverse( function ( child ) {
if(child.isMesh ) {
child.castShadow = true;
child.receiveShadow = true;
}
});
scene.add( object );
});
}
webglOverlayView.onContextRestored = (gl) => {
// create the three.js renderer, using the
// map's WebGL rendering context.
renderer = new THREE.WebGLRenderer({
canvas: gl.canvas,
context: gl,
...gl.getContextAttributes(),
});
renderer.autoClear = false;
renderer.shadowMap.enabled = true;
renderer.shadowMap.type = THREE.PCFSoftShadowMap;
renderer.receiveShadow = true;
renderer.castShadow = true;
}
webglOverlayView.onDraw = (gl, coordinateTransformer) => {
// update camera matrix to ensure the model is georeferenced correctly on the map
const matrix = coordinateTransformer.fromLatLngAltitude(mapOptions.center, 10);
camera.projectionMatrix = new THREE.Matrix4().fromArray(matrix);
webglOverlayView.requestRedraw();
renderer.render(scene, camera);
// always reset the GL state
renderer.resetState();
}
webglOverlayView.setMap(map);
}
(async () => {
const map = await initMap();
initWebglOverlayView(map);
})();
(Image: the result after applying #yosan_melese's code.)

How to get states of the gltf texture rendered with cesium

I am a newbie and have a 3ds Max model that was converted to glTF with lots of texture images. After loading it with Cesium and flying to the model, the untextured white model loads first, and then it takes a long time to render the textures. I want to show a loading image until all textures are rendered. Is there any way to get the texture rendering state?
If you're using the model directly as a graphics primitive (as opposed to using it on a Cesium Entity), then there's a Model.readyPromise that will tell you when the model has finished loading.
Here's a Sandcastle Demo for Cesium 1.54:
var viewer = new Cesium.Viewer('cesiumContainer');
var scene = viewer.scene;
var model;
var modelUrl = '../../../../Apps/SampleData/models/GroundVehicle/GroundVehicle.glb';
var height = 0.0;
var heading = 0.0, pitch = 0.0, roll = 0.0;
var hpr = new Cesium.HeadingPitchRoll(heading, pitch, roll);
var origin = Cesium.Cartesian3.fromDegrees(-123.0744619, 44.0503706, height);
var modelMatrix = Cesium.Transforms.headingPitchRollToFixedFrame(origin, hpr);
scene.primitives.removeAll(); // Remove previous model
model = scene.primitives.add(Cesium.Model.fromGltf({
url : modelUrl,
modelMatrix: modelMatrix
}));
console.log('Model is loading...');
model.readyPromise.then(function(model) {
console.log('Model loading complete.');
// Zoom to model
var camera = viewer.camera;
var controller = scene.screenSpaceCameraController;
var r = 2.0 * Math.max(model.boundingSphere.radius, camera.frustum.near);
controller.minimumZoomDistance = r * 0.5;
var center = Cesium.Matrix4.multiplyByPoint(model.modelMatrix, model.boundingSphere.center, new Cesium.Cartesian3());
var heading = Cesium.Math.toRadians(230.0);
var pitch = Cesium.Math.toRadians(-20.0);
camera.lookAt(center, new Cesium.HeadingPitchRange(heading, pitch, r * 2.0));
}).otherwise(function(error){
console.error(error);
});
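To drive the loading indicator mentioned in the question, the same promise can toggle a DOM element; the #loadingOverlay id below is just an example:
document.getElementById('loadingOverlay').style.display = 'block';   // show the spinner while the model streams in
model.readyPromise.then(function () {
    // the model, including its textures, is ready to render
    document.getElementById('loadingOverlay').style.display = 'none';
}).otherwise(function (error) {
    console.error(error);
});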

How to smooth mesh triangles in STL loaded BufferGeometry

I'm trying to load some STL files using Three.js. The models load correctly, but there are too many visible triangles that I would like to merge/smooth.
I have successfully applied smoothing when loading terrains in other 3D formats, but I can't do it with the BufferGeometry that results from loading an STL file with the STLLoader.
var material = new THREE.MeshLambertMaterial( { ... } );
var path = "./models/budah.stl";
var loader = new THREE.STLLoader();
loader.load( path, function ( object ) {
object.computeBoundingBox();
object.computeBoundingSphere();
object.computeFaceNormals();
object.computeVertexNormals();
object.normalizeNormals();
object.center();
// Apply smooth
var modifier = new THREE.SubdivisionModifier( 1);
var smooth = object.clone();
smooth.mergeVertices();
smooth.computeFaceNormals();
smooth.computeVertexNormals();
modifier.modify( smooth );
scene.add( smooth );
});
This is what I tried; it throws an error: Uncaught TypeError: smooth.mergeVertices is not a function
If I comment out the mergeVertices() line, I get a different error: Uncaught TypeError: Cannot read property 'length' of undefined in SubdivisionModifier, line 156.
It seems that the sample code I'm trying is outdated (this is happening a lot recently due to the massive changes in the Three.js library), or maybe I'm forgetting something. The fact is that the vertices seem to be null?
Thanks in advance!
It seems I was looking in the wrong direction: smoothing the triangles has nothing to do with the SubdivisionModifier. What I needed was easier than that: just compute the vertices BEFORE applying the material, so it can use SmoothShading instead of FlatShading (did I get that right?).
The problem here was that the BufferGeometry returned by the STLLoader has no Geometry-style vertex list, so I had to build it manually. After that, apply mergeVertices() just before computeVertexNormals() and voilà! The triangle facets disappear and everything is smooth:
var material = new THREE.MeshLambertMaterial( { ... } );
var path = "./models/budah.stl";
var loader = new THREE.STLLoader();
loader.load( path, function ( object ) {
object.computeBoundingBox();
object.computeVertexNormals();
object.center();
///////////////////////////////////////////////////////////////
var attrib = object.getAttribute('position');
if(attrib === undefined) {
throw new Error('a given BufferGeometry object must have a position attribute.');
}
var positions = attrib.array;
var vertices = [];
for(var i = 0, n = positions.length; i < n; i += 3) {
var x = positions[i];
var y = positions[i + 1];
var z = positions[i + 2];
vertices.push(new THREE.Vector3(x, y, z));
}
var faces = [];
for(var i = 0, n = vertices.length; i < n; i += 3) {
faces.push(new THREE.Face3(i, i + 1, i + 2));
}
var geometry = new THREE.Geometry();
geometry.vertices = vertices;
geometry.faces = faces;
geometry.computeFaceNormals();
geometry.mergeVertices()
geometry.computeVertexNormals();
///////////////////////////////////////////////////////////////
var mesh = new THREE.Mesh(geometry, material);
scene.add( mesh );
});
Then you can convert it back to a BufferGeometry, because that is more GPU/CPU efficient for complex models:
var geometry = new THREE.Geometry();
geometry.vertices = vertices;
geometry.faces = faces;
geometry.computeFaceNormals();
geometry.mergeVertices();
geometry.computeVertexNormals();
var buffer_g = new THREE.BufferGeometry();
buffer_g.fromGeometry(geometry);
var mesh = new THREE.Mesh(buffer_g, material);
scene.add( mesh )
This issue happened to me while loading an OBJ file. If you have 3D software like 3ds Max:
Open the OBJ file,
Go to polygon selection mode and select all polygons.
Under the Surface Properties panel, click the 'Auto Smooth' button.
Export the model back to OBJ format.
Now you won't have to call geometry.mergeVertices() and geometry.computeVertexNormals(). Just load the OBJ and add it to the scene; the mesh will be smooth.
EDIT:
My OBJ files had MeshPhongMaterial by default, and on changing the shading property to the value 2 the mesh became smooth:
child.material.shading = 2
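For readability, the same change can be written with the named constant; in the three.js revisions that still had material.shading, THREE.SmoothShading was the value 2:
child.material.shading = THREE.SmoothShading; // the named constant for the value 2
child.material.needsUpdate = true;            // recompile the material so the change is applied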
STL does not support vertex indexing.
That is the reason it has duplicated vertices for all triangles.
Each vertex carries the triangle normal as its own normal.
As a result, at the same position (multiple very close vertices) there are multiple normal values.
This leads to a non-smooth surface when the normals are used for the lighting calculation.
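In current three.js releases, where THREE.Geometry no longer exists, the usual way to weld those duplicated STL vertices is BufferGeometryUtils.mergeVertices; a rough sketch (module paths and export style can differ slightly between releases):
import * as THREE from 'three';
import * as BufferGeometryUtils from 'three/examples/jsm/utils/BufferGeometryUtils.js';
import { STLLoader } from 'three/examples/jsm/loaders/STLLoader.js';

const loader = new STLLoader();
loader.load('./models/budah.stl', function (geometry) {
    // weld the per-triangle duplicated vertices, then recompute averaged normals
    const welded = BufferGeometryUtils.mergeVertices(geometry);
    welded.computeVertexNormals();
    scene.add(new THREE.Mesh(welded, new THREE.MeshLambertMaterial({ color: 0xcccccc })));
});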

How to apply physics using physi.js with mirror.js on a plane in three.js

I created a mirror plane using mirror.js in three.js.
Now I want to apply gravity to that plane. I have tried this code, but I get an error:
var planeGeo = new THREE.PlaneBufferGeometry( 300, 300 );
var groundMirror = new THREE.Mirror( renderer, camera, { clipBias: 0.003, textureWidth: WIDTH, textureHeight: HEIGHT, color: 0x777777 } );
var Material = new Physijs.createMaterial(groundMirror);
var cube = new Physijs.BoxMesh(planeGeo,Material,0 );
cube.add( groundMirror );
scene.add(cube);
CubeCamera is a bit of a waste for a single plane; you could always use a single perspective camera and align it with your plane. The basic idea is to take the plane normal, position your camera behind the plane, aligned on the normal going through the center of the plane, and orient (rotate) it to be aligned with the normal. Make sure you set the near value of your camera to the distance between the camera and the plane, so that reflections can be captured right up to the point of contact. The question of scale is resolved by ensuring your viewport is the same size as the plane.
For reference, have a look at the CubeCamera source code:
https://github.com/mrdoob/three.js/blob/master/src/cameras/CubeCamera.js
EDIT:
After looking through the examples again, I found that Slayvin has implemented just that:
http://threejs.org/examples/webgl_mirror.html
Here's the relevant snippet:
var planeGeo = new THREE.PlaneBufferGeometry( 100.1, 100.1 );
// MIRROR plane
groundMirror = new THREE.Mirror( renderer, camera, { clipBias: 0.003, textureWidth: WIDTH, textureHeight: HEIGHT, color: 0x777777 } );
var mirrorMesh = new THREE.Mesh( planeGeo, groundMirror.material );
mirrorMesh.add( groundMirror );
mirrorMesh.rotateX( - Math.PI / 2 );
scene.add( mirrorMesh );
Correct it by changing the material line to
var Material = new Physijs.createMaterial(groundMirror.material);
so the code becomes:
var planeGeo = new THREE.PlaneGeometry( 300, 300 );
var groundMirror = new THREE.Mirror( renderer, camera, { clipBias: 0.003, textureWidth: WIDTH, textureHeight: HEIGHT, color: 0x777777 } );
var Material = new Physijs.createMaterial(groundMirror.material);
var cube = new Physijs.BoxMesh(planeGeo, Material, 0 );
cube.rotateX( - Math.PI / 2 );
cube.add( groundMirror );
scene.add( cube );
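As a side note (not in the original answer), Physijs.createMaterial also accepts optional friction and restitution arguments if you need to tune the physics; the values below are only examples:
var physMaterial = Physijs.createMaterial( groundMirror.material, 0.8, 0.2 ); // material, friction, restitution
var ground = new Physijs.BoxMesh( planeGeo, physMaterial, 0 );                // mass 0 keeps the ground static
ground.rotateX( - Math.PI / 2 );
ground.add( groundMirror );
scene.add( ground );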

Three.js does not show texture on sphere

There are some threads about textures which do not show up. I have tried them all, but nothing helped.
I have spent a few hours on this now. Every time I end up looking at a black sphere.
I am working on Chrome v18 and Windows 7. I also tried Firefox, but this browser does not really support Three.js.
This is the body of the script:
<body>
<script src="../build/Three.js"></script>
<script src="js/Stats.js"></script>
<script src="../build/jquery-1.7.2.min.js"></script>
This is the script itself:
// step 1) camera: set the scene size
var WIDTH = 400,
HEIGHT = 300;
// set some camera attributes
var VIEW_ANGLE = 45,
ASPECT = WIDTH / HEIGHT,
NEAR = 0.1,
FAR = 10000;
var camera = new THREE.PerspectiveCamera(
VIEW_ANGLE,
ASPECT,
NEAR,
FAR );
// step 2) scene:
var scene = new THREE.Scene();
// the camera starts at 0,0,0 so pull it back
scene.add(camera);
camera.position.z = +300;
// get the DOM element to attach to
// - assume we've got jQuery to hand
var container = $('#container');
// step 3) create a WebGL renderer:
var renderer = new THREE.WebGLRenderer();
// start the renderer
renderer.setSize(WIDTH, HEIGHT);
// attach the render-supplied DOM element
container.append(renderer.domElement);
// create the sphere:
// create the sphere's material
// e.g. THREE.MeshBasicMaterial
var sphereMaterial = new THREE.MeshLambertMaterial(
{
map: THREE.ImageUtils.loadTexture("http://dev.root.nl/tree/examples/textures/ash_uvgrid01.jpg")
});
// set up the sphere vars
var radius = 50, segments = 16, rings = 16;
var sphereGeometry = new THREE.SphereGeometry(radius, segments, rings);
// create a new mesh with sphere geometry -
var sphere = new THREE.Mesh(
sphereGeometry,
sphereMaterial
);
sphere.position.x=0;
var s=1;
sphere.scale.set(s, s, s);
// add the sphere to the scene
scene.add(sphere);
// create a point light
var pointLight = new THREE.PointLight( 0xFFFFFF );
// set its position
pointLight.position.x = 10;
pointLight.position.y = 50;
pointLight.position.z = 130;
// add to the scene
scene.add(pointLight);
// draw!
renderer.render(scene, camera);
You need to wait until the image used as texture is fully downloaded.
I have put your code on the web: http://jsfiddle.net/4Qg7K/ and just added a classic "render loop":
requestAnimationFrame(render);
function render(){
requestAnimationFrame(render);
sphere.rotation.y += 0.005; //rotation stuff, just for fun
renderer.render(scene, camera);
};
The requestAnimationFrame function works like a timer, calling the render function each time the browser is ready to update the page.
BTW, Three.js works fine with Firefox.
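Alternatively, with the THREE.ImageUtils.loadTexture call used in the question, you can pass an onLoad callback (its third argument in that three.js revision) and render once the image has actually arrived; a small sketch:
var sphereMaterial = new THREE.MeshLambertMaterial({
    map: THREE.ImageUtils.loadTexture(
        "http://dev.root.nl/tree/examples/textures/ash_uvgrid01.jpg",
        undefined,                 // mapping: keep the default
        function () {
            // the image has finished downloading, so one render is enough to show the texture
            renderer.render(scene, camera);
        }
    )
});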