Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bevyengine
GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_gltf/src/loader/mod.rs
6849 views
1
mod extensions;
2
mod gltf_ext;
3
4
use alloc::sync::Arc;
5
use std::{
6
io::Error,
7
path::{Path, PathBuf},
8
sync::Mutex,
9
};
10
11
#[cfg(feature = "bevy_animation")]
12
use bevy_animation::{prelude::*, AnimationTarget, AnimationTargetId};
13
use bevy_asset::{
14
io::Reader, AssetLoadError, AssetLoader, Handle, LoadContext, ReadAssetBytesError,
15
RenderAssetUsages,
16
};
17
use bevy_camera::{
18
primitives::Aabb, visibility::Visibility, Camera, Camera3d, OrthographicProjection,
19
PerspectiveProjection, Projection, ScalingMode,
20
};
21
use bevy_color::{Color, LinearRgba};
22
use bevy_ecs::{
23
entity::{Entity, EntityHashMap},
24
hierarchy::ChildSpawner,
25
name::Name,
26
world::World,
27
};
28
use bevy_image::{
29
CompressedImageFormats, Image, ImageLoaderSettings, ImageSampler, ImageSamplerDescriptor,
30
ImageType, TextureError,
31
};
32
use bevy_light::{DirectionalLight, PointLight, SpotLight};
33
use bevy_math::{Mat4, Vec3};
34
use bevy_mesh::{
35
morph::{MeshMorphWeights, MorphAttributes, MorphTargetImage, MorphWeights},
36
skinning::{SkinnedMesh, SkinnedMeshInverseBindposes},
37
Indices, Mesh, Mesh3d, MeshVertexAttribute, PrimitiveTopology,
38
};
39
#[cfg(feature = "pbr_transmission_textures")]
40
use bevy_pbr::UvChannel;
41
use bevy_pbr::{MeshMaterial3d, StandardMaterial, MAX_JOINTS};
42
use bevy_platform::collections::{HashMap, HashSet};
43
use bevy_render::render_resource::Face;
44
use bevy_scene::Scene;
45
#[cfg(not(target_arch = "wasm32"))]
46
use bevy_tasks::IoTaskPool;
47
use bevy_transform::components::Transform;
48
49
use gltf::{
50
accessor::Iter,
51
image::Source,
52
mesh::{util::ReadIndices, Mode},
53
Document, Material, Node, Semantic,
54
};
55
56
use serde::{Deserialize, Serialize};
57
#[cfg(feature = "bevy_animation")]
58
use smallvec::SmallVec;
59
60
use thiserror::Error;
61
use tracing::{error, info_span, warn};
62
63
use crate::{
64
vertex_attributes::convert_attribute, Gltf, GltfAssetLabel, GltfExtras, GltfMaterialExtras,
65
GltfMaterialName, GltfMeshExtras, GltfMeshName, GltfNode, GltfSceneExtras, GltfSkin,
66
};
67
68
#[cfg(feature = "bevy_animation")]
69
use self::gltf_ext::scene::collect_path;
70
use self::{
71
extensions::{AnisotropyExtension, ClearcoatExtension, SpecularExtension},
72
gltf_ext::{
73
check_for_cycles, get_linear_textures,
74
material::{
75
alpha_mode, material_label, needs_tangents, uv_channel,
76
warn_on_differing_texture_transforms,
77
},
78
mesh::{primitive_name, primitive_topology},
79
scene::{node_name, node_transform},
80
texture::{texture_handle, texture_sampler, texture_transform_to_affine2},
81
},
82
};
83
use crate::convert_coordinates::ConvertCoordinates as _;
84
85
/// An error that occurs when loading a glTF file.
///
/// Most variants wrap errors from underlying crates (`gltf`, `base64`,
/// `bevy_asset`, `bevy_mesh`) via `#[from]` so they propagate with `?`.
#[derive(Error, Debug)]
pub enum GltfError {
    /// Unsupported primitive mode.
    #[error("unsupported primitive mode")]
    UnsupportedPrimitive {
        /// The primitive mode.
        mode: Mode,
    },
    /// Invalid glTF file.
    #[error("invalid glTF file: {0}")]
    Gltf(#[from] gltf::Error),
    /// Binary blob is missing.
    #[error("binary blob is missing")]
    MissingBlob,
    /// Decoding the base64 mesh data failed.
    #[error("failed to decode base64 mesh data")]
    Base64Decode(#[from] base64::DecodeError),
    /// Unsupported buffer format.
    #[error("unsupported buffer format")]
    BufferFormatUnsupported,
    /// Invalid image mime type.
    // NOTE(review): `#[from(ignore)]` opts this variant out of `From`
    // generation for its field — confirm against the derive in use.
    #[error("invalid image mime type: {0}")]
    #[from(ignore)]
    InvalidImageMimeType(String),
    /// Error when loading a texture. Might be due to a disabled image file format feature.
    #[error("You may need to add the feature for the file format: {0}")]
    ImageError(#[from] TextureError),
    /// Failed to read bytes from an asset path.
    #[error("failed to read bytes from an asset path: {0}")]
    ReadAssetBytesError(#[from] ReadAssetBytesError),
    /// Failed to load asset from an asset path.
    #[error("failed to load asset from an asset path: {0}")]
    AssetLoadError(#[from] AssetLoadError),
    /// Missing sampler for an animation.
    #[error("Missing sampler for animation {0}")]
    #[from(ignore)]
    MissingAnimationSampler(usize),
    /// Failed to generate tangents.
    #[error("failed to generate tangents: {0}")]
    GenerateTangentsError(#[from] bevy_mesh::GenerateTangentsError),
    /// Failed to generate morph targets.
    #[error("failed to generate morph targets: {0}")]
    MorphTarget(#[from] bevy_mesh::morph::MorphBuildError),
    /// Circular children in Nodes
    #[error("GLTF model must be a tree, found cycle instead at node indices: {0:?}")]
    #[from(ignore)]
    CircularChildren(String),
    /// Failed to load a file.
    #[error("failed to load file: {0}")]
    Io(#[from] Error),
}
137
138
/// Loads glTF files with all of their data as their corresponding bevy representations.
pub struct GltfLoader {
    /// List of compressed image formats handled by the loader.
    pub supported_compressed_formats: CompressedImageFormats,
    /// Custom vertex attributes that will be recognized when loading a glTF file.
    ///
    /// Keys must be the attribute names as found in the glTF data, which must start with an underscore.
    /// See [this section of the glTF specification](https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#meshes-overview)
    /// for additional details on custom attributes.
    pub custom_vertex_attributes: HashMap<Box<str>, MeshVertexAttribute>,
    /// Arc to default [`ImageSamplerDescriptor`].
    // Shared behind `Arc<Mutex<_>>`; `load_gltf` locks it and clones the
    // descriptor when the per-load settings do not override the sampler.
    pub default_sampler: Arc<Mutex<ImageSamplerDescriptor>>,
    /// How to convert glTF coordinates on import. Assuming glTF cameras, glTF lights, and glTF meshes had global identity transforms,
    /// their Bevy [`Transform::forward`](bevy_transform::components::Transform::forward) will be pointing in the following global directions:
    /// - When set to `false`
    ///     - glTF cameras and glTF lights: global -Z,
    ///     - glTF models: global +Z.
    /// - When set to `true`
    ///     - glTF cameras and glTF lights: global +Z,
    ///     - glTF models: global -Z.
    ///
    /// The default is `false`.
    // Per-load override: `GltfLoaderSettings::use_model_forward_direction`
    // takes precedence over this value when it is `Some`.
    pub default_use_model_forward_direction: bool,
}
162
163
/// Specifies optional settings for processing gltfs at load time. By default, all recognized contents of
/// the gltf will be loaded.
///
/// # Example
///
/// To load a gltf but exclude the cameras, replace a call to `asset_server.load("my.gltf")` with
/// ```no_run
/// # use bevy_asset::{AssetServer, Handle};
/// # use bevy_gltf::*;
/// # let asset_server: AssetServer = panic!();
/// let gltf_handle: Handle<Gltf> = asset_server.load_with_settings(
///     "my.gltf",
///     |s: &mut GltfLoaderSettings| {
///         s.load_cameras = false;
///     }
/// );
/// ```
#[derive(Serialize, Deserialize)]
pub struct GltfLoaderSettings {
    /// If empty, the gltf mesh nodes will be skipped.
    ///
    /// Otherwise, nodes will be loaded and retained in RAM/VRAM according to the active flags.
    pub load_meshes: RenderAssetUsages,
    /// If empty, the gltf materials will be skipped.
    ///
    /// Otherwise, materials will be loaded and retained in RAM/VRAM according to the active flags.
    pub load_materials: RenderAssetUsages,
    /// If true, the loader will spawn cameras for gltf camera nodes.
    pub load_cameras: bool,
    /// If true, the loader will spawn lights for gltf light nodes.
    pub load_lights: bool,
    /// If true, the loader will load `AnimationClip` assets, and also add
    /// `AnimationTarget` and `AnimationPlayer` components to hierarchies
    /// affected by animation. Requires the `bevy_animation` feature.
    pub load_animations: bool,
    /// If true, the loader will include the root of the gltf root node.
    // When set, the parsed `gltf::Gltf` document is stored in `Gltf::source`.
    pub include_source: bool,
    /// Overrides the default sampler. Data from sampler node is added on top of that.
    ///
    /// If None, uses the global default which is stored in the [`DefaultGltfImageSampler`](crate::DefaultGltfImageSampler) resource.
    pub default_sampler: Option<ImageSamplerDescriptor>,
    /// If true, the loader will ignore sampler data from gltf and use the default sampler.
    pub override_sampler: bool,
    /// _CAUTION: This is an experimental feature with [known issues](https://github.com/bevyengine/bevy/issues/20621). Behavior may change in future versions._
    ///
    /// How to convert glTF coordinates on import. Assuming glTF cameras, glTF lights, and glTF meshes had global unit transforms,
    /// their Bevy [`Transform::forward`](bevy_transform::components::Transform::forward) will be pointing in the following global directions:
    /// - When set to `false`
    ///     - glTF cameras and glTF lights: global -Z,
    ///     - glTF models: global +Z.
    /// - When set to `true`
    ///     - glTF cameras and glTF lights: global +Z,
    ///     - glTF models: global -Z.
    ///
    /// If `None`, uses the global default set by [`GltfPlugin::use_model_forward_direction`](crate::GltfPlugin::use_model_forward_direction).
    pub use_model_forward_direction: Option<bool>,
}
220
221
impl Default for GltfLoaderSettings {
222
fn default() -> Self {
223
Self {
224
load_meshes: RenderAssetUsages::default(),
225
load_materials: RenderAssetUsages::default(),
226
load_cameras: true,
227
load_lights: true,
228
load_animations: true,
229
include_source: false,
230
default_sampler: None,
231
override_sampler: false,
232
use_model_forward_direction: None,
233
}
234
}
235
}
236
237
impl GltfLoader {
    /// Loads an entire glTF file.
    ///
    /// Parses `bytes` as a glTF/GLB document and registers every contained
    /// asset (animations, images, materials, meshes, skins, nodes, scenes)
    /// as a labeled sub-asset on `load_context`, returning the top-level
    /// [`Gltf`] asset that references them.
    ///
    /// # Errors
    ///
    /// Returns a [`GltfError`] if the document is malformed, a buffer or
    /// animation sampler is missing, the node graph contains a cycle, or any
    /// sub-asset fails to build.
    pub async fn load_gltf<'a, 'b, 'c>(
        loader: &GltfLoader,
        bytes: &'a [u8],
        load_context: &'b mut LoadContext<'c>,
        settings: &'b GltfLoaderSettings,
    ) -> Result<Gltf, GltfError> {
        let gltf = gltf::Gltf::from_slice(bytes)?;

        // The asset path is used for log/span labels; a non-UTF-8 path is a
        // hard error.
        let file_name = load_context
            .asset_path()
            .path()
            .to_str()
            .ok_or(GltfError::Gltf(gltf::Error::Io(Error::new(
                std::io::ErrorKind::InvalidInput,
                "Gltf file name invalid",
            ))))?
            .to_string();
        let buffer_data = load_buffers(&gltf, load_context).await?;

        let linear_textures = get_linear_textures(&gltf.document);

        // Map from node index to (animation root index, name path) used to
        // build `AnimationTargetId`s for animated nodes.
        #[cfg(feature = "bevy_animation")]
        let paths = if settings.load_animations {
            let mut paths = HashMap::<usize, (usize, Vec<Name>)>::default();
            for scene in gltf.scenes() {
                for node in scene.nodes() {
                    let root_index = node.index();
                    collect_path(&node, &[], &mut paths, root_index, &mut HashSet::default());
                }
            }
            paths
        } else {
            Default::default()
        };

        // Per-load setting wins over the loader-wide default.
        let convert_coordinates = match settings.use_model_forward_direction {
            Some(convert_coordinates) => convert_coordinates,
            None => loader.default_use_model_forward_direction,
        };

        // --- Animations -----------------------------------------------------
        // Each glTF animation channel is converted into a `VariableCurve`
        // targeting translation, rotation, scale, or morph weights, picking a
        // curve type that matches the sampler's interpolation mode.
        #[cfg(feature = "bevy_animation")]
        let (animations, named_animations, animation_roots) = if settings.load_animations {
            use bevy_animation::{
                animated_field, animation_curves::*, gltf_curves::*, VariableCurve,
            };
            use bevy_math::{
                curve::{ConstantCurve, Interval, UnevenSampleAutoCurve},
                Quat, Vec4,
            };
            use gltf::animation::util::ReadOutputs;
            let mut animations = vec![];
            let mut named_animations = <HashMap<_, _>>::default();
            let mut animation_roots = <HashSet<_>>::default();
            for animation in gltf.animations() {
                let mut animation_clip = AnimationClip::default();
                for channel in animation.channels() {
                    let node = channel.target().node();
                    let interpolation = channel.sampler().interpolation();
                    let reader = channel.reader(|buffer| Some(&buffer_data[buffer.index()]));
                    // Sampler input = keyframe times; sparse accessors are
                    // skipped, a missing input aborts the whole load.
                    let keyframe_timestamps: Vec<f32> = if let Some(inputs) = reader.read_inputs() {
                        match inputs {
                            Iter::Standard(times) => times.collect(),
                            Iter::Sparse(_) => {
                                warn!("Sparse accessor not supported for animation sampler input");
                                continue;
                            }
                        }
                    } else {
                        warn!("Animations without a sampler input are not supported");
                        return Err(GltfError::MissingAnimationSampler(animation.index()));
                    };

                    if keyframe_timestamps.is_empty() {
                        warn!("Tried to load animation with no keyframe timestamps");
                        continue;
                    }

                    // Sampler output = keyframe values. A single keyframe
                    // becomes a constant curve; otherwise the interpolation
                    // mode selects the curve implementation. Curve
                    // constructors returning `Err` yield `None` and the
                    // channel is skipped below.
                    let maybe_curve: Option<VariableCurve> = if let Some(outputs) =
                        reader.read_outputs()
                    {
                        match outputs {
                            ReadOutputs::Translations(tr) => {
                                let translation_property = animated_field!(Transform::translation);
                                let translations: Vec<Vec3> = tr
                                    .map(Vec3::from)
                                    .map(|verts| {
                                        if convert_coordinates {
                                            Vec3::convert_coordinates(verts)
                                        } else {
                                            verts
                                        }
                                    })
                                    .collect();
                                if keyframe_timestamps.len() == 1 {
                                    Some(VariableCurve::new(AnimatableCurve::new(
                                        translation_property,
                                        ConstantCurve::new(Interval::EVERYWHERE, translations[0]),
                                    )))
                                } else {
                                    match interpolation {
                                        gltf::animation::Interpolation::Linear => {
                                            UnevenSampleAutoCurve::new(
                                                keyframe_timestamps.into_iter().zip(translations),
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    translation_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                        gltf::animation::Interpolation::Step => {
                                            SteppedKeyframeCurve::new(
                                                keyframe_timestamps.into_iter().zip(translations),
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    translation_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                        gltf::animation::Interpolation::CubicSpline => {
                                            CubicKeyframeCurve::new(
                                                keyframe_timestamps,
                                                translations,
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    translation_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                    }
                                }
                            }
                            ReadOutputs::Rotations(rots) => {
                                let rotation_property = animated_field!(Transform::rotation);
                                let rotations: Vec<Quat> = rots
                                    .into_f32()
                                    .map(Quat::from_array)
                                    .map(|quat| {
                                        if convert_coordinates {
                                            Quat::convert_coordinates(quat)
                                        } else {
                                            quat
                                        }
                                    })
                                    .collect();
                                if keyframe_timestamps.len() == 1 {
                                    Some(VariableCurve::new(AnimatableCurve::new(
                                        rotation_property,
                                        ConstantCurve::new(Interval::EVERYWHERE, rotations[0]),
                                    )))
                                } else {
                                    match interpolation {
                                        gltf::animation::Interpolation::Linear => {
                                            UnevenSampleAutoCurve::new(
                                                keyframe_timestamps.into_iter().zip(rotations),
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    rotation_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                        gltf::animation::Interpolation::Step => {
                                            SteppedKeyframeCurve::new(
                                                keyframe_timestamps.into_iter().zip(rotations),
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    rotation_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                        gltf::animation::Interpolation::CubicSpline => {
                                            // Cubic rotation keyframes are fed as Vec4
                                            // (raw quaternion components plus tangents).
                                            CubicRotationCurve::new(
                                                keyframe_timestamps,
                                                rotations.into_iter().map(Vec4::from),
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    rotation_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                    }
                                }
                            }
                            ReadOutputs::Scales(scale) => {
                                // NOTE(review): scales are not coordinate-converted here,
                                // unlike translations/rotations — presumably scale is
                                // axis-magnitude only; confirm intended.
                                let scale_property = animated_field!(Transform::scale);
                                let scales: Vec<Vec3> = scale.map(Vec3::from).collect();
                                if keyframe_timestamps.len() == 1 {
                                    Some(VariableCurve::new(AnimatableCurve::new(
                                        scale_property,
                                        ConstantCurve::new(Interval::EVERYWHERE, scales[0]),
                                    )))
                                } else {
                                    match interpolation {
                                        gltf::animation::Interpolation::Linear => {
                                            UnevenSampleAutoCurve::new(
                                                keyframe_timestamps.into_iter().zip(scales),
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    scale_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                        gltf::animation::Interpolation::Step => {
                                            SteppedKeyframeCurve::new(
                                                keyframe_timestamps.into_iter().zip(scales),
                                            )
                                            .ok()
                                            .map(|curve| {
                                                VariableCurve::new(AnimatableCurve::new(
                                                    scale_property,
                                                    curve,
                                                ))
                                            })
                                        }
                                        gltf::animation::Interpolation::CubicSpline => {
                                            CubicKeyframeCurve::new(keyframe_timestamps, scales)
                                                .ok()
                                                .map(|curve| {
                                                    VariableCurve::new(AnimatableCurve::new(
                                                        scale_property,
                                                        curve,
                                                    ))
                                                })
                                        }
                                    }
                                }
                            }
                            ReadOutputs::MorphTargetWeights(weights) => {
                                let weights: Vec<f32> = weights.into_f32().collect();
                                if keyframe_timestamps.len() == 1 {
                                    #[expect(
                                        clippy::unnecessary_map_on_constructor,
                                        reason = "While the mapping is unnecessary, it is much more readable at this level of indentation. Additionally, mapping makes it more consistent with the other branches."
                                    )]
                                    Some(ConstantCurve::new(Interval::EVERYWHERE, weights))
                                        .map(WeightsCurve)
                                        .map(VariableCurve::new)
                                } else {
                                    match interpolation {
                                        gltf::animation::Interpolation::Linear => {
                                            WideLinearKeyframeCurve::new(
                                                keyframe_timestamps,
                                                weights,
                                            )
                                            .ok()
                                            .map(WeightsCurve)
                                            .map(VariableCurve::new)
                                        }
                                        gltf::animation::Interpolation::Step => {
                                            WideSteppedKeyframeCurve::new(
                                                keyframe_timestamps,
                                                weights,
                                            )
                                            .ok()
                                            .map(WeightsCurve)
                                            .map(VariableCurve::new)
                                        }
                                        gltf::animation::Interpolation::CubicSpline => {
                                            WideCubicKeyframeCurve::new(
                                                keyframe_timestamps,
                                                weights,
                                            )
                                            .ok()
                                            .map(WeightsCurve)
                                            .map(VariableCurve::new)
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        warn!("Animations without a sampler output are not supported");
                        return Err(GltfError::MissingAnimationSampler(animation.index()));
                    };

                    let Some(curve) = maybe_curve else {
                        warn!(
                            "Invalid keyframe data for node {}; curve could not be constructed",
                            node.index()
                        );
                        continue;
                    };

                    // Attach the curve to the target resolved from the node's
                    // name path; unnamed hierarchies cannot be targeted.
                    if let Some((root_index, path)) = paths.get(&node.index()) {
                        animation_roots.insert(*root_index);
                        animation_clip.add_variable_curve_to_target(
                            AnimationTargetId::from_names(path.iter()),
                            curve,
                        );
                    } else {
                        warn!(
                            "Animation ignored for node {}: part of its hierarchy is missing a name",
                            node.index()
                        );
                    }
                }
                let handle = load_context.add_labeled_asset(
                    GltfAssetLabel::Animation(animation.index()).to_string(),
                    animation_clip,
                );
                if let Some(name) = animation.name() {
                    named_animations.insert(name.into(), handle.clone());
                }
                animations.push(handle);
            }
            (animations, named_animations, animation_roots)
        } else {
            Default::default()
        };

        // --- Textures -------------------------------------------------------
        let default_sampler = match settings.default_sampler.as_ref() {
            Some(sampler) => sampler,
            None => &loader.default_sampler.lock().unwrap().clone(),
        };
        // We collect handles to ensure loaded images from paths are not unloaded before they are used elsewhere
        // in the loader. This prevents "reloads", but it also prevents dropping the is_srgb context on reload.
        //
        // In theory we could store a mapping between texture.index() and handle to use
        // later in the loader when looking up handles for materials. However this would mean
        // that the material's load context would no longer track those images as dependencies.
        let mut _texture_handles = Vec::new();
        // Single texture (or wasm, which has no task pool here): load inline.
        if gltf.textures().len() == 1 || cfg!(target_arch = "wasm32") {
            for texture in gltf.textures() {
                let parent_path = load_context.path().parent().unwrap();
                let image = load_image(
                    texture,
                    &buffer_data,
                    &linear_textures,
                    parent_path,
                    loader.supported_compressed_formats,
                    default_sampler,
                    settings,
                )
                .await?;
                image.process_loaded_texture(load_context, &mut _texture_handles);
            }
        } else {
            // Multiple textures: decode them concurrently on the IO task
            // pool, then register results sequentially. Individual texture
            // failures are logged, not fatal.
            #[cfg(not(target_arch = "wasm32"))]
            IoTaskPool::get()
                .scope(|scope| {
                    gltf.textures().for_each(|gltf_texture| {
                        let parent_path = load_context.path().parent().unwrap();
                        let linear_textures = &linear_textures;
                        let buffer_data = &buffer_data;
                        scope.spawn(async move {
                            load_image(
                                gltf_texture,
                                buffer_data,
                                linear_textures,
                                parent_path,
                                loader.supported_compressed_formats,
                                default_sampler,
                                settings,
                            )
                            .await
                        });
                    });
                })
                .into_iter()
                .for_each(|result| match result {
                    Ok(image) => {
                        image.process_loaded_texture(load_context, &mut _texture_handles);
                    }
                    Err(err) => {
                        warn!("Error loading glTF texture: {}", err);
                    }
                });
        }

        // --- Materials ------------------------------------------------------
        let mut materials = vec![];
        let mut named_materials = <HashMap<_, _>>::default();
        // Only include materials in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_materials flag
        if !settings.load_materials.is_empty() {
            // NOTE: materials must be loaded after textures because image load() calls will happen before load_with_settings, preventing is_srgb from being set properly
            for material in gltf.materials() {
                let handle = load_material(&material, load_context, &gltf.document, false);
                if let Some(name) = material.name() {
                    named_materials.insert(name.into(), handle.clone());
                }
                materials.push(handle);
            }
        }

        // --- Meshes ---------------------------------------------------------
        let mut meshes = vec![];
        let mut named_meshes = <HashMap<_, _>>::default();
        // Track which meshes are referenced by skinned vs non-skinned nodes so
        // that joint/weight attributes can be validated per mesh.
        let mut meshes_on_skinned_nodes = <HashSet<_>>::default();
        let mut meshes_on_non_skinned_nodes = <HashSet<_>>::default();
        for gltf_node in gltf.nodes() {
            if gltf_node.skin().is_some() {
                if let Some(mesh) = gltf_node.mesh() {
                    meshes_on_skinned_nodes.insert(mesh.index());
                }
            } else if let Some(mesh) = gltf_node.mesh() {
                meshes_on_non_skinned_nodes.insert(mesh.index());
            }
        }
        for gltf_mesh in gltf.meshes() {
            let mut primitives = vec![];
            for primitive in gltf_mesh.primitives() {
                let primitive_label = GltfAssetLabel::Primitive {
                    mesh: gltf_mesh.index(),
                    primitive: primitive.index(),
                };
                let primitive_topology = primitive_topology(primitive.mode())?;

                let mut mesh = Mesh::new(primitive_topology, settings.load_meshes);

                // Read vertex attributes
                for (semantic, accessor) in primitive.attributes() {
                    // Skinning attributes only make sense on meshes that are
                    // actually used by a skinned node.
                    if [Semantic::Joints(0), Semantic::Weights(0)].contains(&semantic) {
                        if !meshes_on_skinned_nodes.contains(&gltf_mesh.index()) {
                            warn!(
                                "Ignoring attribute {:?} for skinned mesh {} used on non skinned nodes (NODE_SKINNED_MESH_WITHOUT_SKIN)",
                                semantic,
                                primitive_label
                            );
                            continue;
                        } else if meshes_on_non_skinned_nodes.contains(&gltf_mesh.index()) {
                            error!("Skinned mesh {} used on both skinned and non skin nodes, this is likely to cause an error (NODE_SKINNED_MESH_WITHOUT_SKIN)", primitive_label);
                        }
                    }
                    match convert_attribute(
                        semantic,
                        accessor,
                        &buffer_data,
                        &loader.custom_vertex_attributes,
                        convert_coordinates,
                    ) {
                        Ok((attribute, values)) => mesh.insert_attribute(attribute, values),
                        Err(err) => warn!("{}", err),
                    }
                }

                // Read vertex indices
                let reader =
                    primitive.reader(|buffer| Some(buffer_data[buffer.index()].as_slice()));
                if let Some(indices) = reader.read_indices() {
                    mesh.insert_indices(match indices {
                        // u8 indices are widened to u16 (no u8 index format).
                        ReadIndices::U8(is) => Indices::U16(is.map(|x| x as u16).collect()),
                        ReadIndices::U16(is) => Indices::U16(is.collect()),
                        ReadIndices::U32(is) => Indices::U32(is.collect()),
                    });
                };

                // Morph targets: packed into a texture-backed image asset.
                {
                    let morph_target_reader = reader.read_morph_targets();
                    if morph_target_reader.len() != 0 {
                        let morph_targets_label = GltfAssetLabel::MorphTarget {
                            mesh: gltf_mesh.index(),
                            primitive: primitive.index(),
                        };
                        let morph_target_image = MorphTargetImage::new(
                            morph_target_reader.map(|i| PrimitiveMorphAttributesIter {
                                convert_coordinates,
                                positions: i.0,
                                normals: i.1,
                                tangents: i.2,
                            }),
                            mesh.count_vertices(),
                            RenderAssetUsages::default(),
                        )?;
                        let handle = load_context.add_labeled_asset(
                            morph_targets_label.to_string(),
                            morph_target_image.0,
                        );

                        mesh.set_morph_targets(handle);
                        let extras = gltf_mesh.extras().as_ref();
                        // Target names come from mesh extras JSON, if present.
                        if let Some(names) = extras.and_then(|extras| {
                            serde_json::from_str::<MorphTargetNames>(extras.get()).ok()
                        }) {
                            mesh.set_morph_target_names(names.target_names);
                        }
                    }
                }

                // Missing normals on a triangle list: duplicate vertices and
                // compute flat normals.
                if mesh.attribute(Mesh::ATTRIBUTE_NORMAL).is_none()
                    && matches!(mesh.primitive_topology(), PrimitiveTopology::TriangleList)
                {
                    tracing::debug!(
                        "Automatically calculating missing vertex normals for geometry."
                    );
                    let vertex_count_before = mesh.count_vertices();
                    mesh.duplicate_vertices();
                    mesh.compute_flat_normals();
                    let vertex_count_after = mesh.count_vertices();
                    if vertex_count_before != vertex_count_after {
                        tracing::debug!("Missing vertex normals in indexed geometry, computing them as flat. Vertex count increased from {} to {}", vertex_count_before, vertex_count_after);
                    } else {
                        tracing::debug!(
                            "Missing vertex normals in indexed geometry, computing them as flat."
                        );
                    }
                }

                // Missing tangents where the material needs them: generate via
                // mikktspace (requires normals). Failure is non-fatal.
                if !mesh.contains_attribute(Mesh::ATTRIBUTE_TANGENT)
                    && mesh.contains_attribute(Mesh::ATTRIBUTE_NORMAL)
                    && needs_tangents(&primitive.material())
                {
                    tracing::debug!(
                        "Missing vertex tangents for {}, computing them using the mikktspace algorithm. Consider using a tool such as Blender to pre-compute the tangents.", file_name
                    );

                    let generate_tangents_span = info_span!("generate_tangents", name = file_name);

                    generate_tangents_span.in_scope(|| {
                        if let Err(err) = mesh.generate_tangents() {
                            warn!(
                                "Failed to generate vertex tangents using the mikktspace algorithm: {}",
                                err
                            );
                        }
                    });
                }

                let mesh_handle = load_context.add_labeled_asset(primitive_label.to_string(), mesh);
                primitives.push(super::GltfPrimitive::new(
                    &gltf_mesh,
                    &primitive,
                    mesh_handle,
                    primitive
                        .material()
                        .index()
                        .and_then(|i| materials.get(i).cloned()),
                    primitive.extras().as_deref().map(GltfExtras::from),
                    primitive
                        .material()
                        .extras()
                        .as_deref()
                        .map(GltfExtras::from),
                ));
            }

            let mesh = super::GltfMesh::new(
                &gltf_mesh,
                primitives,
                gltf_mesh.extras().as_deref().map(GltfExtras::from),
            );

            let handle = load_context.add_labeled_asset(mesh.asset_label().to_string(), mesh);
            if let Some(name) = gltf_mesh.name() {
                named_meshes.insert(name.into(), handle.clone());
            }
            meshes.push(handle);
        }

        // --- Skins: inverse bind matrices ------------------------------------
        // Missing matrices default to identity, one per joint, per spec.
        let skinned_mesh_inverse_bindposes: Vec<_> = gltf
            .skins()
            .map(|gltf_skin| {
                let reader = gltf_skin.reader(|buffer| Some(&buffer_data[buffer.index()]));
                let local_to_bone_bind_matrices: Vec<Mat4> = reader
                    .read_inverse_bind_matrices()
                    .map(|mats| {
                        mats.map(|mat| Mat4::from_cols_array_2d(&mat))
                            .map(|mat| {
                                if convert_coordinates {
                                    mat.convert_coordinates()
                                } else {
                                    mat
                                }
                            })
                            .collect()
                    })
                    .unwrap_or_else(|| {
                        core::iter::repeat_n(Mat4::IDENTITY, gltf_skin.joints().len()).collect()
                    });

                load_context.add_labeled_asset(
                    GltfAssetLabel::InverseBindMatrices(gltf_skin.index()).to_string(),
                    SkinnedMeshInverseBindposes::from(local_to_bone_bind_matrices),
                )
            })
            .collect();

        // --- Nodes ------------------------------------------------------------
        let mut nodes = HashMap::<usize, Handle<GltfNode>>::default();
        let mut named_nodes = <HashMap<_, _>>::default();
        let mut skins = <HashMap<_, _>>::default();
        let mut named_skins = <HashMap<_, _>>::default();

        // First, create the node handles.
        // (Handles must exist before population so children/joints can refer
        // to nodes that appear later in the file.)
        for node in gltf.nodes() {
            let label = GltfAssetLabel::Node(node.index());
            let label_handle = load_context.get_label_handle(label.to_string());
            nodes.insert(node.index(), label_handle);
        }

        // Then check for cycles.
        check_for_cycles(&gltf)?;

        // Now populate the nodes.
        for node in gltf.nodes() {
            // Skins are created lazily, once each, the first time a node
            // references them.
            let skin = node.skin().map(|skin| {
                skins
                    .entry(skin.index())
                    .or_insert_with(|| {
                        let joints: Vec<_> = skin
                            .joints()
                            .map(|joint| nodes.get(&joint.index()).unwrap().clone())
                            .collect();

                        if joints.len() > MAX_JOINTS {
                            warn!(
                                "The glTF skin {} has {} joints, but the maximum supported is {}",
                                skin.name()
                                    .map(ToString::to_string)
                                    .unwrap_or_else(|| skin.index().to_string()),
                                joints.len(),
                                MAX_JOINTS
                            );
                        }

                        let gltf_skin = GltfSkin::new(
                            &skin,
                            joints,
                            skinned_mesh_inverse_bindposes[skin.index()].clone(),
                            skin.extras().as_deref().map(GltfExtras::from),
                        );

                        let handle = load_context
                            .add_labeled_asset(gltf_skin.asset_label().to_string(), gltf_skin);

                        if let Some(name) = skin.name() {
                            named_skins.insert(name.into(), handle.clone());
                        }

                        handle
                    })
                    .clone()
            });

            let children = node
                .children()
                .map(|child| nodes.get(&child.index()).unwrap().clone())
                .collect();

            let mesh = node
                .mesh()
                .map(|mesh| mesh.index())
                .and_then(|i| meshes.get(i).cloned());

            let gltf_node = GltfNode::new(
                &node,
                children,
                mesh,
                node_transform(&node, convert_coordinates),
                skin,
                node.extras().as_deref().map(GltfExtras::from),
            );

            #[cfg(feature = "bevy_animation")]
            let gltf_node = gltf_node.with_animation_root(animation_roots.contains(&node.index()));

            let handle =
                load_context.add_labeled_asset(gltf_node.asset_label().to_string(), gltf_node);
            nodes.insert(node.index(), handle.clone());
            if let Some(name) = node.name() {
                named_nodes.insert(name.into(), handle);
            }
        }

        // Flatten the node map into a Vec ordered by glTF node index.
        let mut nodes_to_sort = nodes.into_iter().collect::<Vec<_>>();
        nodes_to_sort.sort_by_key(|(i, _)| *i);
        let nodes = nodes_to_sort
            .into_iter()
            .map(|(_, resolved)| resolved)
            .collect();

        // --- Scenes -----------------------------------------------------------
        let mut scenes = vec![];
        let mut named_scenes = <HashMap<_, _>>::default();
        let mut active_camera_found = false;
        for scene in gltf.scenes() {
            // `load_node` cannot return early from the `with_children`
            // closure, so the first error is stashed and re-raised below.
            let mut err = None;
            let mut world = World::default();
            let mut node_index_to_entity_map = <HashMap<_, _>>::default();
            let mut entity_to_skin_index_map = EntityHashMap::default();
            let mut scene_load_context = load_context.begin_labeled_asset();

            let world_root_id = world
                .spawn((Transform::default(), Visibility::default()))
                .with_children(|parent| {
                    for node in scene.nodes() {
                        let result = load_node(
                            &node,
                            parent,
                            load_context,
                            &mut scene_load_context,
                            settings,
                            &mut node_index_to_entity_map,
                            &mut entity_to_skin_index_map,
                            &mut active_camera_found,
                            &Transform::default(),
                            #[cfg(feature = "bevy_animation")]
                            &animation_roots,
                            #[cfg(feature = "bevy_animation")]
                            None,
                            &gltf.document,
                            convert_coordinates,
                        );
                        if result.is_err() {
                            err = Some(result);
                            return;
                        }
                    }
                })
                .id();

            if let Some(extras) = scene.extras().as_ref() {
                world.entity_mut(world_root_id).insert(GltfSceneExtras {
                    value: extras.get().to_string(),
                });
            }

            if let Some(Err(err)) = err {
                return Err(err);
            }

            #[cfg(feature = "bevy_animation")]
            {
                // for each node root in a scene, check if it's the root of an animation
                // if it is, add the AnimationPlayer component
                for node in scene.nodes() {
                    if animation_roots.contains(&node.index()) {
                        world
                            .entity_mut(*node_index_to_entity_map.get(&node.index()).unwrap())
                            .insert(AnimationPlayer::default());
                    }
                }
            }

            // Attach SkinnedMesh components now that all joint entities exist.
            for (&entity, &skin_index) in &entity_to_skin_index_map {
                let mut entity = world.entity_mut(entity);
                let skin = gltf.skins().nth(skin_index).unwrap();
                let joint_entities: Vec<_> = skin
                    .joints()
                    .map(|node| node_index_to_entity_map[&node.index()])
                    .collect();

                entity.insert(SkinnedMesh {
                    inverse_bindposes: skinned_mesh_inverse_bindposes[skin_index].clone(),
                    joints: joint_entities,
                });
            }
            let loaded_scene = scene_load_context.finish(Scene::new(world));
            let scene_handle = load_context.add_loaded_labeled_asset(
                GltfAssetLabel::Scene(scene.index()).to_string(),
                loaded_scene,
            );

            if let Some(name) = scene.name() {
                named_scenes.insert(name.into(), scene_handle.clone());
            }
            scenes.push(scene_handle);
        }

        // --- Final asset ------------------------------------------------------
        Ok(Gltf {
            default_scene: gltf
                .default_scene()
                .and_then(|scene| scenes.get(scene.index()))
                .cloned(),
            scenes,
            named_scenes,
            meshes,
            named_meshes,
            skins: skins.into_values().collect(),
            named_skins,
            materials,
            named_materials,
            nodes,
            named_nodes,
            #[cfg(feature = "bevy_animation")]
            animations,
            #[cfg(feature = "bevy_animation")]
            named_animations,
            source: if settings.include_source {
                Some(gltf)
            } else {
                None
            },
        })
    }
}
1055
1056
impl AssetLoader for GltfLoader {
1057
type Asset = Gltf;
1058
type Settings = GltfLoaderSettings;
1059
type Error = GltfError;
1060
async fn load(
1061
&self,
1062
reader: &mut dyn Reader,
1063
settings: &GltfLoaderSettings,
1064
load_context: &mut LoadContext<'_>,
1065
) -> Result<Gltf, Self::Error> {
1066
let mut bytes = Vec::new();
1067
reader.read_to_end(&mut bytes).await?;
1068
1069
Self::load_gltf(self, &bytes, load_context, settings).await
1070
}
1071
1072
fn extensions(&self) -> &[&str] {
1073
&["gltf", "glb"]
1074
}
1075
}
1076
1077
/// Loads a glTF texture as a bevy [`Image`] and returns it together with its label.
1078
async fn load_image<'a, 'b>(
1079
gltf_texture: gltf::Texture<'a>,
1080
buffer_data: &[Vec<u8>],
1081
linear_textures: &HashSet<usize>,
1082
parent_path: &'b Path,
1083
supported_compressed_formats: CompressedImageFormats,
1084
default_sampler: &ImageSamplerDescriptor,
1085
settings: &GltfLoaderSettings,
1086
) -> Result<ImageOrPath, GltfError> {
1087
let is_srgb = !linear_textures.contains(&gltf_texture.index());
1088
let sampler_descriptor = if settings.override_sampler {
1089
default_sampler.clone()
1090
} else {
1091
texture_sampler(&gltf_texture, default_sampler)
1092
};
1093
1094
match gltf_texture.source().source() {
1095
Source::View { view, mime_type } => {
1096
let start = view.offset();
1097
let end = view.offset() + view.length();
1098
let buffer = &buffer_data[view.buffer().index()][start..end];
1099
let image = Image::from_buffer(
1100
buffer,
1101
ImageType::MimeType(mime_type),
1102
supported_compressed_formats,
1103
is_srgb,
1104
ImageSampler::Descriptor(sampler_descriptor),
1105
settings.load_materials,
1106
)?;
1107
Ok(ImageOrPath::Image {
1108
image,
1109
label: GltfAssetLabel::Texture(gltf_texture.index()),
1110
})
1111
}
1112
Source::Uri { uri, mime_type } => {
1113
let uri = percent_encoding::percent_decode_str(uri)
1114
.decode_utf8()
1115
.unwrap();
1116
let uri = uri.as_ref();
1117
if let Ok(data_uri) = DataUri::parse(uri) {
1118
let bytes = data_uri.decode()?;
1119
let image_type = ImageType::MimeType(data_uri.mime_type);
1120
Ok(ImageOrPath::Image {
1121
image: Image::from_buffer(
1122
&bytes,
1123
mime_type.map(ImageType::MimeType).unwrap_or(image_type),
1124
supported_compressed_formats,
1125
is_srgb,
1126
ImageSampler::Descriptor(sampler_descriptor),
1127
settings.load_materials,
1128
)?,
1129
label: GltfAssetLabel::Texture(gltf_texture.index()),
1130
})
1131
} else {
1132
let image_path = parent_path.join(uri);
1133
Ok(ImageOrPath::Path {
1134
path: image_path,
1135
is_srgb,
1136
sampler_descriptor,
1137
})
1138
}
1139
}
1140
}
1141
}
1142
1143
/// Loads a glTF material as a bevy [`StandardMaterial`] and returns it.
///
/// The material is registered as a labeled sub-asset on `load_context`.
/// `is_scale_inverted` selects the label variant whose face culling is
/// flipped (`Face::Front` instead of `Face::Back`), used for nodes whose
/// world scale has a negative determinant.
fn load_material(
    material: &Material,
    load_context: &mut LoadContext,
    document: &Document,
    is_scale_inverted: bool,
) -> Handle<StandardMaterial> {
    let material_label = material_label(material, is_scale_inverted);
    load_context
        .labeled_asset_scope::<_, ()>(material_label.to_string(), |load_context| {
            let pbr = material.pbr_metallic_roughness();

            // TODO: handle missing label handle errors here?
            let color = pbr.base_color_factor();
            let base_color_channel = pbr
                .base_color_texture()
                .map(|info| uv_channel(material, "base color", info.tex_coord()))
                .unwrap_or_default();
            let base_color_texture = pbr
                .base_color_texture()
                .map(|info| texture_handle(&info.texture(), load_context));

            // The base color texture's transform is reused for every texture on
            // this material; differing transforms on other textures are warned
            // about below via `warn_on_differing_texture_transforms`.
            let uv_transform = pbr
                .base_color_texture()
                .and_then(|info| info.texture_transform().map(texture_transform_to_affine2))
                .unwrap_or_default();

            let normal_map_channel = material
                .normal_texture()
                .map(|info| uv_channel(material, "normal map", info.tex_coord()))
                .unwrap_or_default();
            let normal_map_texture: Option<Handle<Image>> =
                material.normal_texture().map(|normal_texture| {
                    // TODO: handle normal_texture.scale
                    texture_handle(&normal_texture.texture(), load_context)
                });

            let metallic_roughness_channel = pbr
                .metallic_roughness_texture()
                .map(|info| uv_channel(material, "metallic/roughness", info.tex_coord()))
                .unwrap_or_default();
            let metallic_roughness_texture = pbr.metallic_roughness_texture().map(|info| {
                warn_on_differing_texture_transforms(
                    material,
                    &info,
                    uv_transform,
                    "metallic/roughness",
                );
                texture_handle(&info.texture(), load_context)
            });

            let occlusion_channel = material
                .occlusion_texture()
                .map(|info| uv_channel(material, "occlusion", info.tex_coord()))
                .unwrap_or_default();
            let occlusion_texture = material.occlusion_texture().map(|occlusion_texture| {
                // TODO: handle occlusion_texture.strength() (a scalar multiplier for occlusion strength)
                texture_handle(&occlusion_texture.texture(), load_context)
            });

            let emissive = material.emissive_factor();
            let emissive_channel = material
                .emissive_texture()
                .map(|info| uv_channel(material, "emissive", info.tex_coord()))
                .unwrap_or_default();
            let emissive_texture = material.emissive_texture().map(|info| {
                // (A stale TODO about occlusion strength used to sit here —
                // emissive strength is applied below via `emissive_strength()`.)
                warn_on_differing_texture_transforms(material, &info, uv_transform, "emissive");
                texture_handle(&info.texture(), load_context)
            });

            // `KHR_materials_transmission`: factor plus optional texture when the
            // `pbr_transmission_textures` feature is compiled in.
            #[cfg(feature = "pbr_transmission_textures")]
            let (
                specular_transmission,
                specular_transmission_channel,
                specular_transmission_texture,
            ) = material
                .transmission()
                .map_or((0.0, UvChannel::Uv0, None), |transmission| {
                    let specular_transmission_channel = transmission
                        .transmission_texture()
                        .map(|info| uv_channel(material, "specular/transmission", info.tex_coord()))
                        .unwrap_or_default();
                    let transmission_texture: Option<Handle<Image>> = transmission
                        .transmission_texture()
                        .map(|transmission_texture| {
                            texture_handle(&transmission_texture.texture(), load_context)
                        });

                    (
                        transmission.transmission_factor(),
                        specular_transmission_channel,
                        transmission_texture,
                    )
                });

            #[cfg(not(feature = "pbr_transmission_textures"))]
            let specular_transmission = material
                .transmission()
                .map_or(0.0, |transmission| transmission.transmission_factor());

            // `KHR_materials_volume`: thickness/attenuation, with texture support
            // gated behind the same feature as transmission textures.
            #[cfg(feature = "pbr_transmission_textures")]
            let (
                thickness,
                thickness_channel,
                thickness_texture,
                attenuation_distance,
                attenuation_color,
            ) = material.volume().map_or(
                (0.0, UvChannel::Uv0, None, f32::INFINITY, [1.0, 1.0, 1.0]),
                |volume| {
                    let thickness_channel = volume
                        .thickness_texture()
                        .map(|info| uv_channel(material, "thickness", info.tex_coord()))
                        .unwrap_or_default();
                    let thickness_texture: Option<Handle<Image>> =
                        volume.thickness_texture().map(|thickness_texture| {
                            texture_handle(&thickness_texture.texture(), load_context)
                        });

                    (
                        volume.thickness_factor(),
                        thickness_channel,
                        thickness_texture,
                        volume.attenuation_distance(),
                        volume.attenuation_color(),
                    )
                },
            );

            #[cfg(not(feature = "pbr_transmission_textures"))]
            let (thickness, attenuation_distance, attenuation_color) =
                material
                    .volume()
                    .map_or((0.0, f32::INFINITY, [1.0, 1.0, 1.0]), |volume| {
                        (
                            volume.thickness_factor(),
                            volume.attenuation_distance(),
                            volume.attenuation_color(),
                        )
                    });

            // `KHR_materials_ior`; 1.5 is the glTF default index of refraction.
            let ior = material.ior().unwrap_or(1.5);

            // Parse the `KHR_materials_clearcoat` extension data if necessary.
            let clearcoat =
                ClearcoatExtension::parse(load_context, document, material).unwrap_or_default();

            // Parse the `KHR_materials_anisotropy` extension data if necessary.
            let anisotropy =
                AnisotropyExtension::parse(load_context, document, material).unwrap_or_default();

            // Parse the `KHR_materials_specular` extension data if necessary.
            let specular =
                SpecularExtension::parse(load_context, document, material).unwrap_or_default();

            // We need to operate in the Linear color space and be willing to exceed 1.0 in our channels
            let base_emissive = LinearRgba::rgb(emissive[0], emissive[1], emissive[2]);
            let emissive = base_emissive * material.emissive_strength().unwrap_or(1.0);

            Ok(StandardMaterial {
                base_color: Color::linear_rgba(color[0], color[1], color[2], color[3]),
                base_color_channel,
                base_color_texture,
                perceptual_roughness: pbr.roughness_factor(),
                metallic: pbr.metallic_factor(),
                metallic_roughness_channel,
                metallic_roughness_texture,
                normal_map_channel,
                normal_map_texture,
                double_sided: material.double_sided(),
                // Double-sided materials disable culling entirely; otherwise an
                // inverted world scale flips which face is culled.
                cull_mode: if material.double_sided() {
                    None
                } else if is_scale_inverted {
                    Some(Face::Front)
                } else {
                    Some(Face::Back)
                },
                occlusion_channel,
                occlusion_texture,
                emissive,
                emissive_channel,
                emissive_texture,
                specular_transmission,
                #[cfg(feature = "pbr_transmission_textures")]
                specular_transmission_channel,
                #[cfg(feature = "pbr_transmission_textures")]
                specular_transmission_texture,
                thickness,
                #[cfg(feature = "pbr_transmission_textures")]
                thickness_channel,
                #[cfg(feature = "pbr_transmission_textures")]
                thickness_texture,
                ior,
                attenuation_distance,
                attenuation_color: Color::linear_rgb(
                    attenuation_color[0],
                    attenuation_color[1],
                    attenuation_color[2],
                ),
                unlit: material.unlit(),
                alpha_mode: alpha_mode(material),
                uv_transform,
                clearcoat: clearcoat.clearcoat_factor.unwrap_or_default() as f32,
                clearcoat_perceptual_roughness: clearcoat
                    .clearcoat_roughness_factor
                    .unwrap_or_default() as f32,
                #[cfg(feature = "pbr_multi_layer_material_textures")]
                clearcoat_channel: clearcoat.clearcoat_channel,
                #[cfg(feature = "pbr_multi_layer_material_textures")]
                clearcoat_texture: clearcoat.clearcoat_texture,
                #[cfg(feature = "pbr_multi_layer_material_textures")]
                clearcoat_roughness_channel: clearcoat.clearcoat_roughness_channel,
                #[cfg(feature = "pbr_multi_layer_material_textures")]
                clearcoat_roughness_texture: clearcoat.clearcoat_roughness_texture,
                #[cfg(feature = "pbr_multi_layer_material_textures")]
                clearcoat_normal_channel: clearcoat.clearcoat_normal_channel,
                #[cfg(feature = "pbr_multi_layer_material_textures")]
                clearcoat_normal_texture: clearcoat.clearcoat_normal_texture,
                anisotropy_strength: anisotropy.anisotropy_strength.unwrap_or_default() as f32,
                anisotropy_rotation: anisotropy.anisotropy_rotation.unwrap_or_default() as f32,
                #[cfg(feature = "pbr_anisotropy_texture")]
                anisotropy_channel: anisotropy.anisotropy_channel,
                #[cfg(feature = "pbr_anisotropy_texture")]
                anisotropy_texture: anisotropy.anisotropy_texture,
                // From the `KHR_materials_specular` spec:
                // <https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_materials_specular#materials-with-reflectance-parameter>
                reflectance: specular.specular_factor.unwrap_or(1.0) as f32 * 0.5,
                #[cfg(feature = "pbr_specular_textures")]
                specular_channel: specular.specular_channel,
                #[cfg(feature = "pbr_specular_textures")]
                specular_texture: specular.specular_texture,
                specular_tint: match specular.specular_color_factor {
                    Some(color) => {
                        Color::linear_rgb(color[0] as f32, color[1] as f32, color[2] as f32)
                    }
                    None => Color::WHITE,
                },
                #[cfg(feature = "pbr_specular_textures")]
                specular_tint_channel: specular.specular_color_channel,
                #[cfg(feature = "pbr_specular_textures")]
                specular_tint_texture: specular.specular_color_texture,
                ..Default::default()
            })
        })
        .unwrap()
}
1390
1391
/// Loads a glTF node and, recursively, all of its children, spawning one
/// entity per node under `child_spawner`.
///
/// Side effects the caller relies on:
/// - fills `node_index_to_entity_map` (glTF node index -> spawned entity),
/// - fills `entity_to_skin_index_map` for mesh entities that reference a skin
///   (skins are wired up by the caller after the whole scene is spawned),
/// - marks the first camera encountered as active via `active_camera_found`.
#[cfg_attr(
    not(target_arch = "wasm32"),
    expect(
        clippy::result_large_err,
        reason = "`GltfError` is only barely past the threshold for large errors."
    )
)]
fn load_node(
    gltf_node: &Node,
    child_spawner: &mut ChildSpawner,
    root_load_context: &LoadContext,
    load_context: &mut LoadContext,
    settings: &GltfLoaderSettings,
    node_index_to_entity_map: &mut HashMap<usize, Entity>,
    entity_to_skin_index_map: &mut EntityHashMap<usize>,
    active_camera_found: &mut bool,
    parent_transform: &Transform,
    #[cfg(feature = "bevy_animation")] animation_roots: &HashSet<usize>,
    #[cfg(feature = "bevy_animation")] mut animation_context: Option<AnimationContext>,
    document: &Document,
    convert_coordinates: bool,
) -> Result<(), GltfError> {
    // Errors from the recursive closure below are stashed here because the
    // `with_children` closure cannot return a `Result` directly.
    let mut gltf_error = None;
    let transform = node_transform(gltf_node, convert_coordinates);
    let world_transform = *parent_transform * transform;
    // according to https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#instantiation,
    // if the determinant of the transform is negative we must invert the winding order of
    // triangles in meshes on the node.
    // instead we equivalently test if the global scale is inverted by checking if the number
    // of negative scale factors is odd. if so we will assign a copy of the material with face
    // culling inverted, rather than modifying the mesh data directly.
    let is_scale_inverted = world_transform.scale.is_negative_bitmask().count_ones() & 1 == 1;
    let mut node = child_spawner.spawn((transform, Visibility::default()));

    let name = node_name(gltf_node);
    node.insert(name.clone());

    #[cfg(feature = "bevy_animation")]
    if animation_context.is_none() && animation_roots.contains(&gltf_node.index()) {
        // This is an animation root. Make a new animation context.
        animation_context = Some(AnimationContext {
            root: node.id(),
            path: SmallVec::new(),
        });
    }

    #[cfg(feature = "bevy_animation")]
    if let Some(ref mut animation_context) = animation_context {
        // Extend the name path and derive a stable animation target id from it.
        animation_context.path.push(name);

        node.insert(AnimationTarget {
            id: AnimationTargetId::from_names(animation_context.path.iter()),
            player: animation_context.root,
        });
    }

    if let Some(extras) = gltf_node.extras() {
        node.insert(GltfExtras {
            value: extras.get().to_string(),
        });
    }

    // create camera node
    if settings.load_cameras
        && let Some(camera) = gltf_node.camera()
    {
        let projection = match camera.projection() {
            gltf::camera::Projection::Orthographic(orthographic) => {
                let xmag = orthographic.xmag();
                let orthographic_projection = OrthographicProjection {
                    near: orthographic.znear(),
                    far: orthographic.zfar(),
                    scaling_mode: ScalingMode::FixedHorizontal {
                        viewport_width: xmag,
                    },
                    ..OrthographicProjection::default_3d()
                };
                Projection::Orthographic(orthographic_projection)
            }
            gltf::camera::Projection::Perspective(perspective) => {
                let mut perspective_projection: PerspectiveProjection = PerspectiveProjection {
                    fov: perspective.yfov(),
                    near: perspective.znear(),
                    ..Default::default()
                };
                // `zfar` and `aspect_ratio` are optional in glTF; keep bevy's
                // defaults when they are absent.
                if let Some(zfar) = perspective.zfar() {
                    perspective_projection.far = zfar;
                }
                if let Some(aspect_ratio) = perspective.aspect_ratio() {
                    perspective_projection.aspect_ratio = aspect_ratio;
                }
                Projection::Perspective(perspective_projection)
            }
        };

        node.insert((
            Camera3d::default(),
            projection,
            transform,
            Camera {
                // Only the first camera found in the scene starts active.
                is_active: !*active_camera_found,
                ..Default::default()
            },
        ));

        *active_camera_found = true;
    }

    // Map node index to entity
    node_index_to_entity_map.insert(gltf_node.index(), node.id());

    // Captured by the children closure: the first primitive's morph weights,
    // used after the closure to insert `MorphWeights` on this node.
    let mut morph_weights = None;

    node.with_children(|parent| {
        // Only include meshes in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_meshes flag
        if !settings.load_meshes.is_empty()
            && let Some(mesh) = gltf_node.mesh()
        {
            // append primitives
            for primitive in mesh.primitives() {
                let material = primitive.material();
                let material_label = material_label(&material, is_scale_inverted).to_string();

                // This will make sure we load the default material now since it would not have been
                // added when iterating over all the gltf materials (since the default material is
                // not explicitly listed in the gltf).
                // It also ensures an inverted scale copy is instantiated if required.
                if !root_load_context.has_labeled_asset(&material_label)
                    && !load_context.has_labeled_asset(&material_label)
                {
                    load_material(&material, load_context, document, is_scale_inverted);
                }

                let primitive_label = GltfAssetLabel::Primitive {
                    mesh: mesh.index(),
                    primitive: primitive.index(),
                };
                let bounds = primitive.bounding_box();

                let mut mesh_entity = parent.spawn((
                    // TODO: handle missing label handle errors here?
                    Mesh3d(load_context.get_label_handle(primitive_label.to_string())),
                    MeshMaterial3d::<StandardMaterial>(
                        load_context.get_label_handle(&material_label),
                    ),
                ));

                let target_count = primitive.morph_targets().len();
                if target_count != 0 {
                    // Missing weights default to all-zero (the base pose).
                    let weights = match mesh.weights() {
                        Some(weights) => weights.to_vec(),
                        None => vec![0.0; target_count],
                    };

                    if morph_weights.is_none() {
                        morph_weights = Some(weights.clone());
                    }

                    // unwrap: the parent's call to `MeshMorphWeights::new`
                    // means this code doesn't run if it returns an `Err`.
                    // According to https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#morph-targets
                    // they should all have the same length.
                    // > All morph target accessors MUST have the same count as
                    // > the accessors of the original primitive.
                    mesh_entity.insert(MeshMorphWeights::new(weights).unwrap());
                }

                let mut bounds_min = Vec3::from_slice(&bounds.min);
                let mut bounds_max = Vec3::from_slice(&bounds.max);

                // Coordinate conversion can swap which corner is smaller, so
                // re-derive min/max component-wise after converting.
                if convert_coordinates {
                    let converted_min = bounds_min.convert_coordinates();
                    let converted_max = bounds_max.convert_coordinates();

                    bounds_min = converted_min.min(converted_max);
                    bounds_max = converted_min.max(converted_max);
                }

                mesh_entity.insert(Aabb::from_min_max(bounds_min, bounds_max));

                if let Some(extras) = primitive.extras() {
                    mesh_entity.insert(GltfExtras {
                        value: extras.get().to_string(),
                    });
                }

                if let Some(extras) = mesh.extras() {
                    mesh_entity.insert(GltfMeshExtras {
                        value: extras.get().to_string(),
                    });
                }

                if let Some(extras) = material.extras() {
                    mesh_entity.insert(GltfMaterialExtras {
                        value: extras.get().to_string(),
                    });
                }

                if let Some(name) = mesh.name() {
                    mesh_entity.insert(GltfMeshName(name.to_string()));
                }

                if let Some(name) = material.name() {
                    mesh_entity.insert(GltfMaterialName(name.to_string()));
                }

                mesh_entity.insert(Name::new(primitive_name(&mesh, &material)));

                // Mark for adding skinned mesh
                if let Some(skin) = gltf_node.skin() {
                    entity_to_skin_index_map.insert(mesh_entity.id(), skin.index());
                }
            }
        }

        if settings.load_lights
            && let Some(light) = gltf_node.light()
        {
            match light.kind() {
                gltf::khr_lights_punctual::Kind::Directional => {
                    let mut entity = parent.spawn(DirectionalLight {
                        color: Color::srgb_from_array(light.color()),
                        // NOTE: KHR_punctual_lights defines the intensity units for directional
                        // lights in lux (lm/m^2) which is what we need.
                        illuminance: light.intensity(),
                        ..Default::default()
                    });
                    if let Some(name) = light.name() {
                        entity.insert(Name::new(name.to_string()));
                    }
                    if let Some(extras) = light.extras() {
                        entity.insert(GltfExtras {
                            value: extras.get().to_string(),
                        });
                    }
                }
                gltf::khr_lights_punctual::Kind::Point => {
                    let mut entity = parent.spawn(PointLight {
                        color: Color::srgb_from_array(light.color()),
                        // NOTE: KHR_punctual_lights defines the intensity units for point lights in
                        // candela (lm/sr) which is luminous intensity and we need luminous power.
                        // For a point light, luminous power = 4 * pi * luminous intensity
                        intensity: light.intensity() * core::f32::consts::PI * 4.0,
                        range: light.range().unwrap_or(20.0),
                        radius: 0.0,
                        ..Default::default()
                    });
                    if let Some(name) = light.name() {
                        entity.insert(Name::new(name.to_string()));
                    }
                    if let Some(extras) = light.extras() {
                        entity.insert(GltfExtras {
                            value: extras.get().to_string(),
                        });
                    }
                }
                gltf::khr_lights_punctual::Kind::Spot {
                    inner_cone_angle,
                    outer_cone_angle,
                } => {
                    let mut entity = parent.spawn(SpotLight {
                        color: Color::srgb_from_array(light.color()),
                        // NOTE: KHR_punctual_lights defines the intensity units for spot lights in
                        // candela (lm/sr) which is luminous intensity and we need luminous power.
                        // For a spot light, we map luminous power = 4 * pi * luminous intensity
                        intensity: light.intensity() * core::f32::consts::PI * 4.0,
                        range: light.range().unwrap_or(20.0),
                        radius: light.range().unwrap_or(0.0),
                        inner_angle: inner_cone_angle,
                        outer_angle: outer_cone_angle,
                        ..Default::default()
                    });
                    if let Some(name) = light.name() {
                        entity.insert(Name::new(name.to_string()));
                    }
                    if let Some(extras) = light.extras() {
                        entity.insert(GltfExtras {
                            value: extras.get().to_string(),
                        });
                    }
                }
            }
        }

        // append other nodes
        for child in gltf_node.children() {
            if let Err(err) = load_node(
                &child,
                parent,
                root_load_context,
                load_context,
                settings,
                node_index_to_entity_map,
                entity_to_skin_index_map,
                active_camera_found,
                &world_transform,
                #[cfg(feature = "bevy_animation")]
                animation_roots,
                #[cfg(feature = "bevy_animation")]
                animation_context.clone(),
                document,
                convert_coordinates,
            ) {
                // Bail out of the children closure on the first error; it is
                // surfaced at the end of this function.
                gltf_error = Some(err);
                return;
            }
        }
    });

    // Only include meshes in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_meshes flag
    if !settings.load_meshes.is_empty()
        && let (Some(mesh), Some(weights)) = (gltf_node.mesh(), morph_weights)
    {
        let primitive_label = mesh.primitives().next().map(|p| GltfAssetLabel::Primitive {
            mesh: mesh.index(),
            primitive: p.index(),
        });
        let first_mesh =
            primitive_label.map(|label| load_context.get_label_handle(label.to_string()));
        node.insert(MorphWeights::new(weights, first_mesh)?);
    }

    if let Some(err) = gltf_error {
        Err(err)
    } else {
        Ok(())
    }
}
1720
1721
/// Loads the raw glTF buffer data for a specific glTF file.
1722
async fn load_buffers(
1723
gltf: &gltf::Gltf,
1724
load_context: &mut LoadContext<'_>,
1725
) -> Result<Vec<Vec<u8>>, GltfError> {
1726
const VALID_MIME_TYPES: &[&str] = &["application/octet-stream", "application/gltf-buffer"];
1727
1728
let mut buffer_data = Vec::new();
1729
for buffer in gltf.buffers() {
1730
match buffer.source() {
1731
gltf::buffer::Source::Uri(uri) => {
1732
let uri = percent_encoding::percent_decode_str(uri)
1733
.decode_utf8()
1734
.unwrap();
1735
let uri = uri.as_ref();
1736
let buffer_bytes = match DataUri::parse(uri) {
1737
Ok(data_uri) if VALID_MIME_TYPES.contains(&data_uri.mime_type) => {
1738
data_uri.decode()?
1739
}
1740
Ok(_) => return Err(GltfError::BufferFormatUnsupported),
1741
Err(()) => {
1742
// TODO: Remove this and add dep
1743
let buffer_path = load_context.path().parent().unwrap().join(uri);
1744
load_context.read_asset_bytes(buffer_path).await?
1745
}
1746
};
1747
buffer_data.push(buffer_bytes);
1748
}
1749
gltf::buffer::Source::Bin => {
1750
if let Some(blob) = gltf.blob.as_deref() {
1751
buffer_data.push(blob.into());
1752
} else {
1753
return Err(GltfError::MissingBlob);
1754
}
1755
}
1756
}
1757
}
1758
1759
Ok(buffer_data)
1760
}
1761
1762
/// A parsed `data:` URI, e.g. `data:application/octet-stream;base64,...`.
struct DataUri<'a> {
    /// The declared media type (everything between `data:` and the `,`).
    pub mime_type: &'a str,
    /// True when the metadata carried a `;base64` marker.
    pub base64: bool,
    /// The raw payload after the `,` (still encoded when `base64` is true).
    pub data: &'a str,
}
1767
1768
impl<'a> DataUri<'a> {
1769
fn parse(uri: &'a str) -> Result<DataUri<'a>, ()> {
1770
let uri = uri.strip_prefix("data:").ok_or(())?;
1771
let (mime_type, data) = Self::split_once(uri, ',').ok_or(())?;
1772
1773
let (mime_type, base64) = match mime_type.strip_suffix(";base64") {
1774
Some(mime_type) => (mime_type, true),
1775
None => (mime_type, false),
1776
};
1777
1778
Ok(DataUri {
1779
mime_type,
1780
base64,
1781
data,
1782
})
1783
}
1784
1785
fn decode(&self) -> Result<Vec<u8>, base64::DecodeError> {
1786
if self.base64 {
1787
base64::Engine::decode(&base64::engine::general_purpose::STANDARD, self.data)
1788
} else {
1789
Ok(self.data.as_bytes().to_owned())
1790
}
1791
}
1792
1793
fn split_once(input: &str, delimiter: char) -> Option<(&str, &str)> {
1794
let mut iter = input.splitn(2, delimiter);
1795
Some((iter.next()?, iter.next()?))
1796
}
1797
}
1798
1799
/// The two forms a glTF texture can take after `load_image`: fully decoded in
/// memory, or a path for the asset server to load on its own.
enum ImageOrPath {
    /// An image decoded from a buffer view or data URI, ready to be added as
    /// a labeled sub-asset.
    Image {
        image: Image,
        label: GltfAssetLabel,
    },
    /// An external file referenced by URI, to be loaded later with the
    /// recorded color space and sampler settings.
    Path {
        path: PathBuf,
        is_srgb: bool,
        sampler_descriptor: ImageSamplerDescriptor,
    },
}
1810
1811
impl ImageOrPath {
1812
// TODO: use the threaded impl on wasm once wasm thread pool doesn't deadlock on it
1813
// See https://github.com/bevyengine/bevy/issues/1924 for more details
1814
// The taskpool use is also avoided when there is only one texture for performance reasons and
1815
// to avoid https://github.com/bevyengine/bevy/pull/2725
1816
// PERF: could this be a Vec instead? Are gltf texture indices dense?
1817
fn process_loaded_texture(
1818
self,
1819
load_context: &mut LoadContext,
1820
handles: &mut Vec<Handle<Image>>,
1821
) {
1822
let handle = match self {
1823
ImageOrPath::Image { label, image } => {
1824
load_context.add_labeled_asset(label.to_string(), image)
1825
}
1826
ImageOrPath::Path {
1827
path,
1828
is_srgb,
1829
sampler_descriptor,
1830
} => load_context
1831
.loader()
1832
.with_settings(move |settings: &mut ImageLoaderSettings| {
1833
settings.is_srgb = is_srgb;
1834
settings.sampler = ImageSampler::Descriptor(sampler_descriptor.clone());
1835
})
1836
.load(path),
1837
};
1838
handles.push(handle);
1839
}
1840
}
1841
1842
/// Zips a primitive's optional morph-target position/normal/tangent accessor
/// iterators into per-vertex [`MorphAttributes`] values.
struct PrimitiveMorphAttributesIter<'s> {
    /// Whether each yielded attribute is converted via `convert_coordinates()`.
    convert_coordinates: bool,
    /// `None` when the morph target omits the corresponding attribute stream.
    positions: Option<Iter<'s, [f32; 3]>>,
    normals: Option<Iter<'s, [f32; 3]>>,
    tangents: Option<Iter<'s, [f32; 3]>>,
}
1848
1849
impl<'s> Iterator for PrimitiveMorphAttributesIter<'s> {
1850
type Item = MorphAttributes;
1851
1852
fn next(&mut self) -> Option<Self::Item> {
1853
let position = self.positions.as_mut().and_then(Iterator::next);
1854
let normal = self.normals.as_mut().and_then(Iterator::next);
1855
let tangent = self.tangents.as_mut().and_then(Iterator::next);
1856
if position.is_none() && normal.is_none() && tangent.is_none() {
1857
return None;
1858
}
1859
1860
let mut attributes = MorphAttributes {
1861
position: position.map(Into::into).unwrap_or(Vec3::ZERO),
1862
normal: normal.map(Into::into).unwrap_or(Vec3::ZERO),
1863
tangent: tangent.map(Into::into).unwrap_or(Vec3::ZERO),
1864
};
1865
1866
if self.convert_coordinates {
1867
attributes = MorphAttributes {
1868
position: attributes.position.convert_coordinates(),
1869
normal: attributes.normal.convert_coordinates(),
1870
tangent: attributes.tangent.convert_coordinates(),
1871
}
1872
}
1873
1874
Some(attributes)
1875
}
1876
}
1877
1878
/// A helper structure for `load_node` that contains information about the
/// nearest ancestor animation root.
///
/// Cloned into each recursive `load_node` call so every descendant records
/// its full name path from the root.
#[cfg(feature = "bevy_animation")]
#[derive(Clone)]
struct AnimationContext {
    /// The nearest ancestor animation root.
    pub root: Entity,
    /// The path to the animation root. This is used for constructing the
    /// animation target UUIDs.
    pub path: SmallVec<[Name; 8]>,
}
1889
1890
/// Deserialization helper for the conventional `targetNames` array used by
/// exporters to name a mesh's morph targets.
/// NOTE(review): presumably parsed from mesh `extras` — confirm at the use
/// site, which is outside this chunk.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct MorphTargetNames {
    pub target_names: Vec<String>,
}
1895
1896
#[cfg(test)]
1897
mod test {
1898
use std::path::Path;
1899
1900
use crate::{Gltf, GltfAssetLabel, GltfNode, GltfSkin};
1901
use bevy_app::{App, TaskPoolPlugin};
1902
use bevy_asset::{
1903
io::{
1904
memory::{Dir, MemoryAssetReader},
1905
AssetSource, AssetSourceId,
1906
},
1907
AssetApp, AssetPlugin, AssetServer, Assets, Handle, LoadState,
1908
};
1909
use bevy_ecs::{resource::Resource, world::World};
1910
use bevy_log::LogPlugin;
1911
use bevy_mesh::skinning::SkinnedMeshInverseBindposes;
1912
use bevy_render::mesh::MeshPlugin;
1913
use bevy_scene::ScenePlugin;
1914
1915
/// Builds a headless test [`App`] whose default asset source reads from the
/// given in-memory [`Dir`].
fn test_app(dir: Dir) -> App {
    let mut app = App::new();
    let reader = MemoryAssetReader { root: dir };
    app.register_asset_source(
        AssetSourceId::Default,
        AssetSource::build().with_reader(move || Box::new(reader.clone())),
    )
    .add_plugins((
        LogPlugin::default(),
        TaskPoolPlugin::default(),
        AssetPlugin::default(),
        ScenePlugin,
        MeshPlugin,
        crate::GltfPlugin::default(),
    ));

    // Complete plugin setup as a real runner would; the tests drive
    // `app.update()` manually afterwards.
    app.finish();
    app.cleanup();

    app
}
1936
1937
// Upper bound on update cycles before a test is considered hung.
const LARGE_ITERATION_COUNT: usize = 10000;

/// Repeatedly updates `app` until `predicate` returns `Some(())`, panicking
/// if that does not happen within `LARGE_ITERATION_COUNT` iterations.
fn run_app_until(app: &mut App, mut predicate: impl FnMut(&mut World) -> Option<()>) {
    for _ in 0..LARGE_ITERATION_COUNT {
        app.update();
        if predicate(app.world_mut()).is_some() {
            return;
        }
    }

    panic!("Ran out of loops to return `Some` from `predicate`");
}
1949
1950
/// Registers the glTF text `gltf` at `gltf_path` in an in-memory asset
/// source, then updates a fresh test app until the asset finishes loading
/// (panicking if loading fails or never completes).
fn load_gltf_into_app(gltf_path: &str, gltf: &str) -> App {
    #[expect(
        dead_code,
        reason = "This struct is used to keep the handle alive. As such, we have no need to handle the handle directly."
    )]
    #[derive(Resource)]
    struct GltfHandle(Handle<Gltf>);

    let dir = Dir::default();
    dir.insert_asset_text(Path::new(gltf_path), gltf);
    let mut app = test_app(dir);
    app.update();
    let asset_server = app.world().resource::<AssetServer>().clone();
    let handle: Handle<Gltf> = asset_server.load(gltf_path.to_string());
    let handle_id = handle.id();
    // Store the handle in a resource so the asset isn't dropped mid-load.
    app.insert_resource(GltfHandle(handle));
    app.update();
    run_app_until(&mut app, |_world| {
        let load_state = asset_server.get_load_state(handle_id).unwrap();
        match load_state {
            LoadState::Loaded => Some(()),
            LoadState::Failed(err) => panic!("{err}"),
            _ => None,
        }
    });
    app
}
1977
1978
// Loads a minimal glTF with a single named node and checks that the node is
// exposed both by index and through `named_nodes`, with the expected label.
#[test]
fn single_node() {
    let gltf_path = "test.gltf";
    let app = load_gltf_into_app(
        gltf_path,
        r#"
{
    "asset": {
        "version": "2.0"
    },
    "nodes": [
        {
            "name": "TestSingleNode"
        }
    ],
    "scene": 0,
    "scenes": [{ "nodes": [0] }]
}
"#,
    );
    let asset_server = app.world().resource::<AssetServer>();
    let handle = asset_server.load(gltf_path);
    let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
    let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
    let gltf_root = gltf_root_assets.get(&handle).unwrap();
    assert!(gltf_root.nodes.len() == 1, "Single node");
    assert!(
        gltf_root.named_nodes.contains_key("TestSingleNode"),
        "Named node is in named nodes"
    );
    let gltf_node = gltf_node_assets
        .get(gltf_root.named_nodes.get("TestSingleNode").unwrap())
        .unwrap();
    assert_eq!(gltf_node.name, "TestSingleNode", "Correct name");
    assert_eq!(gltf_node.index, 0, "Correct index");
    assert_eq!(gltf_node.children.len(), 0, "No children");
    assert_eq!(gltf_node.asset_label(), GltfAssetLabel::Node(0));
}
2016
2017
/// Two sibling nodes with no parent/child links load as two independent
/// `GltfNode`s with empty `children`.
#[test]
fn node_hierarchy_no_hierarchy() {
    let gltf_path = "test.gltf";
    let app = load_gltf_into_app(
        gltf_path,
        r#"
{
  "asset": {
    "version": "2.0"
  },
  "nodes": [
    {
      "name": "l1"
    },
    {
      "name": "l2"
    }
  ],
  "scene": 0,
  "scenes": [{ "nodes": [0] }]
}
"#,
    );
    let asset_server = app.world().resource::<AssetServer>();
    let handle = asset_server.load(gltf_path);
    let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
    let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
    let gltf_root = gltf_root_assets.get(&handle).unwrap();
    let result = gltf_root
        .nodes
        .iter()
        .map(|h| gltf_node_assets.get(h).unwrap())
        .collect::<Vec<_>>();
    assert_eq!(result.len(), 2);
    assert_eq!(result[0].name, "l1");
    assert_eq!(result[0].children.len(), 0);
    assert_eq!(result[1].name, "l2");
    assert_eq!(result[1].children.len(), 0);
}

/// A parent node pointing at one child loads with the child count reflected
/// in the parent's `children` and the child itself left childless.
#[test]
fn node_hierarchy_simple_hierarchy() {
    let gltf_path = "test.gltf";
    let app = load_gltf_into_app(
        gltf_path,
        r#"
{
  "asset": {
    "version": "2.0"
  },
  "nodes": [
    {
      "name": "l1",
      "children": [1]
    },
    {
      "name": "l2"
    }
  ],
  "scene": 0,
  "scenes": [{ "nodes": [0] }]
}
"#,
    );
    let asset_server = app.world().resource::<AssetServer>();
    let handle = asset_server.load(gltf_path);
    let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
    let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
    let gltf_root = gltf_root_assets.get(&handle).unwrap();
    let result = gltf_root
        .nodes
        .iter()
        .map(|h| gltf_node_assets.get(h).unwrap())
        .collect::<Vec<_>>();
    assert_eq!(result.len(), 2);
    assert_eq!(result[0].name, "l1");
    assert_eq!(result[0].children.len(), 1);
    assert_eq!(result[1].name, "l2");
    assert_eq!(result[1].children.len(), 0);
}

/// A deeper tree (chain plus a three-way branch) loads with every node's
/// `children` count matching the glTF `children` arrays.
#[test]
fn node_hierarchy_hierarchy() {
    let gltf_path = "test.gltf";
    let app = load_gltf_into_app(
        gltf_path,
        r#"
{
  "asset": {
    "version": "2.0"
  },
  "nodes": [
    {
      "name": "l1",
      "children": [1]
    },
    {
      "name": "l2",
      "children": [2]
    },
    {
      "name": "l3",
      "children": [3, 4, 5]
    },
    {
      "name": "l4",
      "children": [6]
    },
    {
      "name": "l5"
    },
    {
      "name": "l6"
    },
    {
      "name": "l7"
    }
  ],
  "scene": 0,
  "scenes": [{ "nodes": [0] }]
}
"#,
    );
    let asset_server = app.world().resource::<AssetServer>();
    let handle = asset_server.load(gltf_path);
    let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
    let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
    let gltf_root = gltf_root_assets.get(&handle).unwrap();
    let result = gltf_root
        .nodes
        .iter()
        .map(|h| gltf_node_assets.get(h).unwrap())
        .collect::<Vec<_>>();
    assert_eq!(result.len(), 7);
    assert_eq!(result[0].name, "l1");
    assert_eq!(result[0].children.len(), 1);
    assert_eq!(result[1].name, "l2");
    assert_eq!(result[1].children.len(), 1);
    assert_eq!(result[2].name, "l3");
    assert_eq!(result[2].children.len(), 3);
    assert_eq!(result[3].name, "l4");
    assert_eq!(result[3].children.len(), 1);
    assert_eq!(result[4].name, "l5");
    assert_eq!(result[4].children.len(), 0);
    assert_eq!(result[5].name, "l6");
    assert_eq!(result[5].children.len(), 0);
    assert_eq!(result[6].name, "l7");
    assert_eq!(result[6].children.len(), 0);
}

/// A cyclic node graph (l1 -> l2 -> l1) must be rejected: the load is driven
/// manually (not via `load_gltf_into_app`, which panics on failure) and is
/// expected to end in a failed load state rather than loop forever.
#[test]
fn node_hierarchy_cyclic() {
    let gltf_path = "test.gltf";
    let gltf_str = r#"
{
  "asset": {
    "version": "2.0"
  },
  "nodes": [
    {
      "name": "l1",
      "children": [1]
    },
    {
      "name": "l2",
      "children": [0]
    }
  ],
  "scene": 0,
  "scenes": [{ "nodes": [0] }]
}
"#;

    let dir = Dir::default();
    dir.insert_asset_text(Path::new(gltf_path), gltf_str);
    let mut app = test_app(dir);
    app.update();
    let asset_server = app.world().resource::<AssetServer>().clone();
    let handle: Handle<Gltf> = asset_server.load(gltf_path);
    let handle_id = handle.id();
    app.update();
    run_app_until(&mut app, |_world| {
        let load_state = asset_server.get_load_state(handle_id).unwrap();
        if load_state.is_failed() {
            Some(())
        } else {
            None
        }
    });
    let load_state = asset_server.get_load_state(handle_id).unwrap();
    assert!(load_state.is_failed());
}

/// A node referencing a nonexistent child index (2, with only nodes 0 and 1
/// defined) must fail to load; the load is driven manually and the failed
/// load state is asserted.
#[test]
fn node_hierarchy_missing_node() {
    let gltf_path = "test.gltf";
    let gltf_str = r#"
{
  "asset": {
    "version": "2.0"
  },
  "nodes": [
    {
      "name": "l1",
      "children": [2]
    },
    {
      "name": "l2"
    }
  ],
  "scene": 0,
  "scenes": [{ "nodes": [0] }]
}
"#;

    let dir = Dir::default();
    dir.insert_asset_text(Path::new(gltf_path), gltf_str);
    let mut app = test_app(dir);
    app.update();
    let asset_server = app.world().resource::<AssetServer>().clone();
    let handle: Handle<Gltf> = asset_server.load(gltf_path);
    let handle_id = handle.id();
    app.update();
    run_app_until(&mut app, |_world| {
        let load_state = asset_server.get_load_state(handle_id).unwrap();
        if load_state.is_failed() {
            Some(())
        } else {
            None
        }
    });
    let load_state = asset_server.get_load_state(handle_id).unwrap();
    assert!(load_state.is_failed());
}

/// A skinned node with two joints loads a `GltfSkin` whose joints map to the
/// expected node handles and whose inverse bind matrices asset exists.
/// The embedded base64 buffer holds two 4x4 f32 matrices (128 bytes) read by
/// accessor 0 (`MAT4`, componentType 5126 = f32, count 2).
#[test]
fn skin_node() {
    let gltf_path = "test.gltf";
    let app = load_gltf_into_app(
        gltf_path,
        r#"
{
  "asset": {
    "version": "2.0"
  },
  "nodes": [
    {
      "name": "skinned",
      "skin": 0,
      "children": [1, 2]
    },
    {
      "name": "joint1"
    },
    {
      "name": "joint2"
    }
  ],
  "skins": [
    {
      "inverseBindMatrices": 0,
      "joints": [1, 2]
    }
  ],
  "buffers": [
    {
      "uri" : "data:application/gltf-buffer;base64,AACAPwAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAAAAAAAAAAAAAAAAAAAAAgD8AAAAAAAAAAAAAAAAAAAAAAACAPwAAgD8AAAAAAAAAAAAAAAAAAAAAAACAPwAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAAAAAAAAAAIC/AAAAAAAAgD8=",
      "byteLength" : 128
    }
  ],
  "bufferViews": [
    {
      "buffer": 0,
      "byteLength": 128
    }
  ],
  "accessors": [
    {
      "bufferView" : 0,
      "componentType" : 5126,
      "count" : 2,
      "type" : "MAT4"
    }
  ],
  "scene": 0,
  "scenes": [{ "nodes": [0] }]
}
"#,
    );
    let asset_server = app.world().resource::<AssetServer>();
    let handle = asset_server.load(gltf_path);
    let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
    let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
    let gltf_skin_assets = app.world().resource::<Assets<GltfSkin>>();
    let gltf_inverse_bind_matrices = app
        .world()
        .resource::<Assets<SkinnedMeshInverseBindposes>>();
    let gltf_root = gltf_root_assets.get(&handle).unwrap();

    assert_eq!(gltf_root.skins.len(), 1);
    assert_eq!(gltf_root.nodes.len(), 3);

    let skin = gltf_skin_assets.get(&gltf_root.skins[0]).unwrap();
    assert_eq!(skin.joints.len(), 2);
    assert_eq!(skin.joints[0], gltf_root.nodes[1]);
    assert_eq!(skin.joints[1], gltf_root.nodes[2]);
    assert!(gltf_inverse_bind_matrices.contains(&skin.inverse_bind_matrices));

    let skinned_node = gltf_node_assets.get(&gltf_root.nodes[0]).unwrap();
    assert_eq!(skinned_node.name, "skinned");
    assert_eq!(skinned_node.children.len(), 2);
    assert_eq!(skinned_node.skin.as_ref(), Some(&gltf_root.skins[0]));
}
}