-rw-r--r--  Cargo.lock | 103
-rw-r--r--  components/canvas/canvas_paint_thread.rs | 3
-rw-r--r--  components/canvas/webgl_paint_thread.rs | 13
-rw-r--r--  components/canvas_traits/lib.rs | 3
-rw-r--r--  components/config/prefs.rs | 4
-rw-r--r--  components/constellation/Cargo.toml | 1
-rw-r--r--  components/constellation/constellation.rs | 34
-rw-r--r--  components/constellation/lib.rs | 1
-rw-r--r--  components/constellation/pipeline.rs | 6
-rw-r--r--  components/layout/block.rs | 60
-rw-r--r--  components/layout/display_list_builder.rs | 174
-rw-r--r--  components/layout/fragment.rs | 114
-rw-r--r--  components/profile/time.rs | 1
-rw-r--r--  components/profile_traits/time.rs | 1
-rw-r--r--  components/script/Cargo.toml | 2
-rw-r--r--  components/script/dom/bindings/conversions.rs | 58
-rw-r--r--  components/script/dom/mod.rs | 9
-rw-r--r--  components/script/dom/navigator.rs | 16
-rw-r--r--  components/script/dom/vr.rs | 160
-rw-r--r--  components/script/dom/vrdisplay.rs | 607
-rw-r--r--  components/script/dom/vrdisplaycapabilities.rs | 62
-rw-r--r--  components/script/dom/vrdisplayevent.rs | 116
-rw-r--r--  components/script/dom/vreyeparameters.rs | 75
-rw-r--r--  components/script/dom/vrfieldofview.rs | 58
-rw-r--r--  components/script/dom/vrframedata.rs | 122
-rw-r--r--  components/script/dom/vrpose.rs | 133
-rw-r--r--  components/script/dom/vrstageparameters.rs | 75
-rw-r--r--  components/script/dom/webidls/Navigator.webidl | 5
-rw-r--r--  components/script/dom/webidls/VR.webidl | 10
-rw-r--r--  components/script/dom/webidls/VRDisplay.webidl | 131
-rw-r--r--  components/script/dom/webidls/VRDisplayCapabilities.webidl | 13
-rw-r--r--  components/script/dom/webidls/VRDisplayEvent.webidl | 23
-rw-r--r--  components/script/dom/webidls/VREyeParameters.webidl | 13
-rw-r--r--  components/script/dom/webidls/VRFieldOfView.webidl | 13
-rw-r--r--  components/script/dom/webidls/VRFrameData.webidl | 15
-rw-r--r--  components/script/dom/webidls/VRLayer.webidl | 13
-rw-r--r--  components/script/dom/webidls/VRPose.webidl | 14
-rw-r--r--  components/script/dom/webidls/VRStageParameters.webidl | 11
-rw-r--r--  components/script/dom/window.rs | 13
-rw-r--r--  components/script/lib.rs | 1
-rw-r--r--  components/script/script_runtime.rs | 1
-rw-r--r--  components/script/script_thread.rs | 21
-rw-r--r--  components/script_layout_interface/wrapper_traits.rs | 2
-rw-r--r--  components/script_traits/Cargo.toml | 1
-rw-r--r--  components/script_traits/lib.rs | 19
-rw-r--r--  components/servo/Cargo.toml | 2
-rw-r--r--  components/servo/lib.rs | 15
-rw-r--r--  components/style/build_gecko.rs | 3
-rw-r--r--  components/style/gecko_bindings/bindings.rs | 5
-rw-r--r--  components/style/gecko_bindings/structs_debug.rs | 294
-rw-r--r--  components/style/gecko_bindings/structs_release.rs | 294
-rw-r--r--  components/style/properties/gecko.mako.rs | 11
-rw-r--r--  components/style/properties/longhand/inherited_text.mako.rs | 1
-rw-r--r--  components/style/stylist.rs | 44
-rw-r--r--  components/webvr/Cargo.toml | 23
-rw-r--r--  components/webvr/lib.rs | 19
-rw-r--r--  components/webvr/webvr_thread.rs | 377
-rw-r--r--  components/webvr_traits/Cargo.toml | 17
-rw-r--r--  components/webvr_traits/lib.rs | 29
-rw-r--r--  components/webvr_traits/webvr_traits.rs | 24
-rw-r--r--  ports/geckolib/glue.rs | 8
-rw-r--r--  resources/prefs.json | 2
-rw-r--r--  tests/html/webvr/advanced-mirroring.html | 320
-rw-r--r--  tests/html/webvr/dynamic-resolution.html | 312
-rw-r--r--  tests/html/webvr/js/third-party/gl-matrix-min.js | 29
-rw-r--r--  tests/html/webvr/js/third-party/webvr-polyfill.js | 5939
-rw-r--r--  tests/html/webvr/js/third-party/wglu/wglu-debug-geometry.js | 270
-rw-r--r--  tests/html/webvr/js/third-party/wglu/wglu-preserve-state.js | 162
-rw-r--r--  tests/html/webvr/js/third-party/wglu/wglu-program.js | 179
-rw-r--r--  tests/html/webvr/js/third-party/wglu/wglu-stats.js | 649
-rw-r--r--  tests/html/webvr/js/third-party/wglu/wglu-texture.js | 687
-rw-r--r--  tests/html/webvr/js/third-party/wglu/wglu-url.js | 94
-rw-r--r--  tests/html/webvr/js/vr-audio-panner.js | 284
-rw-r--r--  tests/html/webvr/js/vr-cube-island.js | 210
-rw-r--r--  tests/html/webvr/js/vr-cube-sea.js | 188
-rw-r--r--  tests/html/webvr/js/vr-panorama.js | 219
-rw-r--r--  tests/html/webvr/js/vr-samples-util.js | 181
-rw-r--r--  tests/html/webvr/media/icons/cardboard64.png | bin 0 -> 788 bytes
-rw-r--r--  tests/html/webvr/media/textures/cube-sea.png | bin 0 -> 52425 bytes
-rw-r--r--  tests/html/webvr/room-scale.html | 312
-rw-r--r--  tests/html/webvr/simple-mirroring.html | 262
-rw-r--r--  tests/html/webvr/vr-presentation.html | 307
82 files changed, 13573 insertions, 529 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 5edcc72af80..7337d596445 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,24 +1,12 @@
[root]
-name = "webdriver_server"
+name = "webvr_traits"
version = "0.0.1"
dependencies = [
- "cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "hyper 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "image 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ipc-channel 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"msg 0.0.1",
- "net_traits 0.0.1",
- "plugins 0.0.1",
- "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "script_traits 0.0.1",
- "servo_config 0.0.1",
- "servo_url 0.0.1",
- "url 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "uuid 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "webdriver 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rust-webvr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -262,7 +250,7 @@ version = "0.0.1"
dependencies = [
"azure 0.9.2 (git+https://github.com/servo/rust-azure)",
"canvas_traits 0.0.1",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"gleam 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)",
"ipc-channel 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -278,7 +266,7 @@ dependencies = [
name = "canvas_traits"
version = "0.0.1"
dependencies = [
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -445,6 +433,7 @@ dependencies = [
"servo_url 0.0.1",
"style_traits 0.0.1",
"webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
+ "webvr_traits 0.0.1",
]
[[package]]
@@ -512,7 +501,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cssparser"
-version = "0.7.1"
+version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -889,7 +878,7 @@ version = "0.0.1"
dependencies = [
"app_units 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1299,7 +1288,7 @@ dependencies = [
"atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"canvas_traits 0.0.1",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"gfx 0.0.1",
@@ -1500,6 +1489,8 @@ dependencies = [
"webdriver_server 0.0.1",
"webrender 0.11.0 (git+https://github.com/servo/webrender)",
"webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
+ "webvr 0.0.1",
+ "webvr_traits 0.0.1",
]
[[package]]
@@ -1642,7 +1633,7 @@ name = "msg"
version = "0.0.1"
dependencies = [
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"plugins 0.0.1",
@@ -2219,6 +2210,18 @@ version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "rust-webvr"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libloading 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 0.8.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "rustc-demangle"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2255,7 +2258,7 @@ dependencies = [
"caseless 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"cmake 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)",
"cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"devtools_traits 0.0.1",
"encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2308,6 +2311,8 @@ dependencies = [
"uuid 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
"websocket 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webvr 0.0.1",
+ "webvr_traits 0.0.1",
"xml5ever 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2319,7 +2324,7 @@ dependencies = [
"atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"canvas_traits 0.0.1",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"gfx_traits 0.0.1",
"heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2379,6 +2384,7 @@ dependencies = [
"style_traits 0.0.1",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webvr_traits 0.0.1",
]
[[package]]
@@ -2387,7 +2393,7 @@ version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2734,7 +2740,7 @@ dependencies = [
"atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2777,7 +2783,7 @@ name = "style_tests"
version = "0.0.1"
dependencies = [
"app_units 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"html5ever-atoms 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2798,7 +2804,7 @@ name = "style_traits"
version = "0.0.1"
dependencies = [
"app_units 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2813,7 +2819,7 @@ version = "0.0.1"
dependencies = [
"app_units 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"geckoservo 0.0.1",
@@ -3179,6 +3185,29 @@ dependencies = [
]
[[package]]
+name = "webdriver_server"
+version = "0.0.1"
+dependencies = [
+ "cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "euclid 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hyper 0.9.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "image 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ipc-channel 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "msg 0.0.1",
+ "net_traits 0.0.1",
+ "plugins 0.0.1",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "script_traits 0.0.1",
+ "servo_config 0.0.1",
+ "servo_url 0.0.1",
+ "url 1.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "uuid 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webdriver 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "webrender"
version = "0.11.0"
source = "git+https://github.com/servo/webrender#845dcc9f0a2abdbe24754fb830897b0e3666e336"
@@ -3239,6 +3268,19 @@ dependencies = [
]
[[package]]
+name = "webvr"
+version = "0.0.1"
+dependencies = [
+ "ipc-channel 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "msg 0.0.1",
+ "script_traits 0.0.1",
+ "servo_config 0.0.1",
+ "webrender_traits 0.11.0 (git+https://github.com/servo/webrender)",
+ "webvr_traits 0.0.1",
+]
+
+[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3366,7 +3408,7 @@ dependencies = [
"checksum core-graphics 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "66e998abb8823fecd2a8a7205429b17a340d447d8c69b3bce86846dcdea3e33b"
"checksum core-text 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2debbf22a8358e5e270e958b6d65694667be7a2ef9c3a2bf05a0872a3124dc98"
"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
-"checksum cssparser 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c69c28c9a1a2b98ed625e473c5fa756ba05e3dd4df98661b7eb05dc77ba8a2ad"
+"checksum cssparser 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7df2dd7211145ec79b2b3864344c5e0b242bfcdbd4805e9c0d7281a2309b0715"
"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850"
"checksum dbus 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "94d266a872aaf68b50d02083c429a3686935ab6ab54824290509cdc422673eaf"
"checksum debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9a032eac705ca39214d169f83e3d3da290af06d8d1d344d1baad2fd002dca4b3"
@@ -3500,6 +3542,7 @@ dependencies = [
"checksum ref_slice 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "825740057197b7d43025e7faf6477eaabc03434e153233da02d1f44602f71527"
"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
+"checksum rust-webvr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0f1c2770eade344950b6959fb7f4c658200a252a61f265b3487383b82fafe61e"
"checksum rustc-demangle 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1430d286cadb237c17c885e25447c982c97113926bb579f4379c0eca8d9586dc"
"checksum rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "237546c689f20bb44980270c73c3b9edd0891c1be49cc1274406134a66d3957b"
"checksum rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c5f5376ea5e30ce23c03eb77cbe4962b988deead10910c372b226388b594c084"
diff --git a/components/canvas/canvas_paint_thread.rs b/components/canvas/canvas_paint_thread.rs
index f7c3fdfb6ae..447d60a2862 100644
--- a/components/canvas/canvas_paint_thread.rs
+++ b/components/canvas/canvas_paint_thread.rs
@@ -211,7 +211,8 @@ impl<'a> CanvasPaintThread<'a> {
}
}
}
- CanvasMsg::WebGL(_) => panic!("Wrong message sent to Canvas2D thread"),
+ CanvasMsg::WebGL(_) => panic!("Wrong WebGL message sent to Canvas2D thread"),
+ CanvasMsg::WebVR(_) => panic!("Wrong WebVR message sent to Canvas2D thread"),
}
}
}).expect("Thread spawning failed");
diff --git a/components/canvas/webgl_paint_thread.rs b/components/canvas/webgl_paint_thread.rs
index 7de4fd67979..e852742bc15 100644
--- a/components/canvas/webgl_paint_thread.rs
+++ b/components/canvas/webgl_paint_thread.rs
@@ -144,6 +144,18 @@ impl WebGLPaintThread {
}
}
+ fn handle_webvr_message(&self, message: webrender_traits::VRCompositorCommand) {
+ match self.data {
+ WebGLPaintTaskData::WebRender(ref api, id) => {
+ api.send_vr_compositor_command(id, message);
+ }
+ WebGLPaintTaskData::Readback(..) => {
+ error!("Webrender is required for WebVR implementation");
+ }
+ }
+ }
+
+
/// Creates a new `WebGLPaintThread` and returns an `IpcSender` to
/// communicate with it.
pub fn start(size: Size2D<i32>,
@@ -190,6 +202,7 @@ impl WebGLPaintThread {
}
}
CanvasMsg::Canvas2d(_) => panic!("Wrong message sent to WebGLThread"),
+ CanvasMsg::WebVR(message) => painter.handle_webvr_message(message)
}
}
}).expect("Thread spawning failed");
diff --git a/components/canvas_traits/lib.rs b/components/canvas_traits/lib.rs
index e9e769bb51c..50d431b9a6d 100644
--- a/components/canvas_traits/lib.rs
+++ b/components/canvas_traits/lib.rs
@@ -27,7 +27,7 @@ use euclid::size::Size2D;
use ipc_channel::ipc::IpcSender;
use std::default::Default;
use std::str::FromStr;
-use webrender_traits::{WebGLCommand, WebGLContextId};
+use webrender_traits::{WebGLCommand, WebGLContextId, VRCompositorCommand};
#[derive(Clone, Deserialize, Serialize)]
pub enum FillRule {
@@ -42,6 +42,7 @@ pub enum CanvasMsg {
FromLayout(FromLayoutMsg),
FromScript(FromScriptMsg),
WebGL(WebGLCommand),
+ WebVR(VRCompositorCommand)
}
#[derive(Clone, Deserialize, Serialize)]
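The new CanvasMsg::WebVR variant lets VR compositor commands travel over the same IPC channel that already carries 2D and WebGL canvas messages; the webgl_paint_thread.rs hunk above shows the receiving side forwarding them to WebRender. A minimal sketch of the sending side (not part of this patch), assuming `sender` is the IpcSender<CanvasMsg> attached to a WebGL paint thread and `cmd` is an already-built webrender_traits::VRCompositorCommand:

    use canvas_traits::CanvasMsg;
    use ipc_channel::ipc::IpcSender;
    use webrender_traits::VRCompositorCommand;

    // Illustrative helper: route a VR compositor command through the
    // existing canvas channel; the WebGL paint thread matches on
    // CanvasMsg::WebVR and hands it to WebRender.
    fn send_vr_command(sender: &IpcSender<CanvasMsg>, cmd: VRCompositorCommand) {
        sender.send(CanvasMsg::WebVR(cmd)).expect("WebGL paint thread is gone");
    }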
diff --git a/components/config/prefs.rs b/components/config/prefs.rs
index c0ad11bfe0b..fd07383f2b2 100644
--- a/components/config/prefs.rs
+++ b/components/config/prefs.rs
@@ -259,4 +259,8 @@ impl Preferences {
pub fn extend(&self, extension: HashMap<String, Pref>) {
self.0.write().unwrap().extend(extension);
}
+
+ pub fn is_webvr_enabled(&self) -> bool {
+ self.get("dom.webvr.enabled").as_boolean().unwrap_or(false)
+ }
}
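is_webvr_enabled reads the "dom.webvr.enabled" preference (resources/prefs.json is also touched by this patch, per the diffstat) and defaults to false when the pref is unset or not a boolean. A hedged usage sketch, assuming the global PREFS handle that other hunks in this patch already import:

    use servo_config::prefs::PREFS;

    // Illustrative only: gate WebVR setup on the new preference.
    fn webvr_allowed() -> bool {
        PREFS.is_webvr_enabled()
    }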
diff --git a/components/constellation/Cargo.toml b/components/constellation/Cargo.toml
index 8b3cc7f7db1..b10c07703e2 100644
--- a/components/constellation/Cargo.toml
+++ b/components/constellation/Cargo.toml
@@ -36,6 +36,7 @@ servo_config = {path = "../config", features = ["servo"]}
servo_rand = {path = "../rand"}
servo_remutex = {path = "../remutex"}
servo_url = {path = "../url", features = ["servo"]}
+webvr_traits = {path = "../webvr_traits"}
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"
diff --git a/components/constellation/constellation.rs b/components/constellation/constellation.rs
index 8adf2e6ab29..fc67095dce7 100644
--- a/components/constellation/constellation.rs
+++ b/components/constellation/constellation.rs
@@ -101,6 +101,7 @@ use script_traits::{LayoutMsg as FromLayoutMsg, ScriptMsg as FromScriptMsg, Scri
use script_traits::{LogEntry, ServiceWorkerMsg, webdriver_msg};
use script_traits::{MozBrowserErrorType, MozBrowserEvent, WebDriverCommandMsg, WindowSizeData};
use script_traits::{SWManagerMsg, ScopeThings, WindowSizeType};
+use script_traits::WebVREventMsg;
use servo_config::opts;
use servo_config::prefs::PREFS;
use servo_rand::{Rng, SeedableRng, ServoRng, random};
@@ -122,6 +123,7 @@ use style_traits::cursor::Cursor;
use style_traits::viewport::ViewportConstraints;
use timer_scheduler::TimerScheduler;
use webrender_traits;
+use webvr_traits::WebVRMsg;
/// The `Constellation` itself. In the servo browser, there is one
/// constellation, which maintains all of the browser global data.
@@ -280,6 +282,9 @@ pub struct Constellation<Message, LTF, STF> {
/// Phantom data that keeps the Rust type system happy.
phantom: PhantomData<(Message, LTF, STF)>,
+
+ /// A channel through which messages can be sent to the webvr thread.
+ webvr_thread: Option<IpcSender<WebVRMsg>>,
}
/// State needed to construct a constellation.
@@ -535,6 +540,7 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
info!("Using seed {} for random pipeline closure.", seed);
(rng, prob)
}),
+ webvr_thread: None
};
constellation.run();
@@ -645,6 +651,7 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
prev_visibility: prev_visibility,
webrender_api_sender: self.webrender_api_sender.clone(),
is_private: is_private,
+ webvr_thread: self.webvr_thread.clone()
});
let pipeline = match result {
@@ -879,6 +886,14 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
FromCompositorMsg::LogEntry(top_level_frame_id, thread_name, entry) => {
self.handle_log_entry(top_level_frame_id, thread_name, entry);
}
+ FromCompositorMsg::SetWebVRThread(webvr_thread) => {
+ assert!(self.webvr_thread.is_none());
+ self.webvr_thread = Some(webvr_thread)
+ }
+ FromCompositorMsg::WebVREvent(pipeline_ids, event) => {
+ debug!("constellation got WebVR event");
+ self.handle_webvr_event(pipeline_ids, event);
+ }
}
}
@@ -1186,6 +1201,13 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
}
}
+ if let Some(chan) = self.webvr_thread.as_ref() {
+ debug!("Exiting WebVR thread.");
+ if let Err(e) = chan.send(WebVRMsg::Exit) {
+ warn!("Exit WebVR thread failed ({})", e);
+ }
+ }
+
debug!("Exiting font cache thread.");
self.font_cache_thread.exit();
@@ -1274,6 +1296,18 @@ impl<Message, LTF, STF> Constellation<Message, LTF, STF>
}
}
+ fn handle_webvr_event(&mut self, ids: Vec<PipelineId>, event: WebVREventMsg) {
+ for id in ids {
+ match self.pipelines.get_mut(&id) {
+ Some(ref pipeline) => {
+ // Notify script thread
+ let _ = pipeline.event_loop.send(ConstellationControlMsg::WebVREvent(id, event.clone()));
+ },
+ None => warn!("constellation got webvr event for dead pipeline")
+ }
+ }
+ }
+
fn handle_init_load(&mut self, url: ServoUrl) {
let window_size = self.window_size.visible_viewport;
let root_pipeline_id = PipelineId::new();
diff --git a/components/constellation/lib.rs b/components/constellation/lib.rs
index a42e9fc7407..6c983dbdf62 100644
--- a/components/constellation/lib.rs
+++ b/components/constellation/lib.rs
@@ -41,6 +41,7 @@ extern crate servo_remutex;
extern crate servo_url;
extern crate style_traits;
extern crate webrender_traits;
+extern crate webvr_traits;
mod constellation;
mod event_loop;
diff --git a/components/constellation/pipeline.rs b/components/constellation/pipeline.rs
index aa7d3461615..fba15eefcf7 100644
--- a/components/constellation/pipeline.rs
+++ b/components/constellation/pipeline.rs
@@ -36,6 +36,7 @@ use std::rc::Rc;
use std::sync::mpsc::Sender;
use style_traits::{PagePx, ViewportPx};
use webrender_traits;
+use webvr_traits::WebVRMsg;
/// A `Pipeline` is the constellation's view of a `Document`. Each pipeline has an
/// event loop (executed by a script thread) and a layout thread. A script thread
@@ -169,6 +170,8 @@ pub struct InitialPipelineState {
/// Whether this pipeline is considered private.
pub is_private: bool,
+ /// A channel to the webvr thread.
+ pub webvr_thread: Option<IpcSender<WebVRMsg>>,
}
impl Pipeline {
@@ -268,6 +271,7 @@ impl Pipeline {
script_content_process_shutdown_chan: script_content_process_shutdown_chan,
script_content_process_shutdown_port: script_content_process_shutdown_port,
webrender_api_sender: state.webrender_api_sender,
+ webvr_thread: state.webvr_thread,
};
// Spawn the child process.
@@ -470,6 +474,7 @@ pub struct UnprivilegedPipelineContent {
script_content_process_shutdown_chan: IpcSender<()>,
script_content_process_shutdown_port: IpcReceiver<()>,
webrender_api_sender: webrender_traits::RenderApiSender,
+ webvr_thread: Option<IpcSender<WebVRMsg>>,
}
impl UnprivilegedPipelineContent {
@@ -496,6 +501,7 @@ impl UnprivilegedPipelineContent {
window_size: self.window_size,
pipeline_namespace_id: self.pipeline_namespace_id,
content_process_shutdown_chan: self.script_content_process_shutdown_chan,
+ webvr_thread: self.webvr_thread
}, self.load_data.clone());
LTF::create(self.id,
diff --git a/components/layout/block.rs b/components/layout/block.rs
index 5934a506eda..bdb38742aae 100644
--- a/components/layout/block.rs
+++ b/components/layout/block.rs
@@ -31,7 +31,7 @@ use app_units::{Au, MAX_AU};
use context::{LayoutContext, SharedLayoutContext};
use display_list_builder::{BorderPaintingMode, DisplayListBuildState, FragmentDisplayListBuilding};
use display_list_builder::BlockFlowDisplayListBuilding;
-use euclid::{Point2D, Size2D};
+use euclid::{Point2D, Rect, Size2D};
use floats::{ClearType, FloatKind, Floats, PlacementInfo};
use flow::{self, BaseFlow, EarlyAbsolutePositionInfo, Flow, FlowClass, ForceNonfloatedFlag};
use flow::{BLOCK_POSITION_IS_STATIC, CLEARS_LEFT, CLEARS_RIGHT};
@@ -1766,6 +1766,64 @@ impl BlockFlow {
pub fn is_block_flex_item(&self) -> bool {
self.fragment.flags.contains(IS_BLOCK_FLEX_ITEM)
}
+
+ /// Changes this block's clipping region from its parent's coordinate system to its own
+ /// coordinate system if necessary (i.e. if this block is a stacking context).
+ ///
+ /// The clipping region is initially in each block's parent's coordinate system because the
+ /// parent of each block does not have enough information to determine what the child's
+ /// coordinate system is on its own. Specifically, if the child is absolutely positioned, the
+ /// parent does not know where the child's absolute position is at the time it assigns clipping
+ /// regions, because flows compute their own absolute positions.
+ fn switch_coordinate_system_if_necessary(&mut self) {
+ // Avoid overflows!
+ if self.base.clip.is_max() {
+ return
+ }
+
+ if !self.fragment.establishes_stacking_context() {
+ return
+ }
+
+ let stacking_relative_border_box =
+ self.fragment.stacking_relative_border_box(&self.base.stacking_relative_position,
+ &self.base
+ .early_absolute_position_info
+ .relative_containing_block_size,
+ self.base
+ .early_absolute_position_info
+ .relative_containing_block_mode,
+ CoordinateSystem::Parent);
+ self.base.clip = self.base.clip.translate(&-stacking_relative_border_box.origin);
+
+ // Account for `transform`, if applicable.
+ if self.fragment.style.get_box().transform.0.is_none() {
+ return
+ }
+ let transform = match self.fragment
+ .transform_matrix(&stacking_relative_border_box)
+ .inverse() {
+ Some(transform) => transform,
+ None => {
+ // Singular matrix. Ignore it.
+ return
+ }
+ };
+
+ // FIXME(pcwalton): This is inaccurate: not all transforms are 2D, and not all clips are
+ // axis-aligned.
+ let bounding_rect = self.base.clip.bounding_rect();
+ let bounding_rect = Rect::new(Point2D::new(bounding_rect.origin.x.to_f32_px(),
+ bounding_rect.origin.y.to_f32_px()),
+ Size2D::new(bounding_rect.size.width.to_f32_px(),
+ bounding_rect.size.height.to_f32_px()));
+ let clip_rect = transform.to_2d().transform_rect(&bounding_rect);
+ let clip_rect = Rect::new(Point2D::new(Au::from_f32_px(clip_rect.origin.x),
+ Au::from_f32_px(clip_rect.origin.y)),
+ Size2D::new(Au::from_f32_px(clip_rect.size.width),
+ Au::from_f32_px(clip_rect.size.height)));
+ self.base.clip = ClippingRegion::from_rect(&clip_rect)
+ }
}
impl Flow for BlockFlow {
diff --git a/components/layout/display_list_builder.rs b/components/layout/display_list_builder.rs
index db632be4dd2..8dd881cd277 100644
--- a/components/layout/display_list_builder.rs
+++ b/components/layout/display_list_builder.rs
@@ -14,7 +14,7 @@ use app_units::{AU_PER_PX, Au};
use block::{BlockFlow, BlockStackingContextType};
use canvas_traits::{CanvasData, CanvasMsg, FromLayoutMsg};
use context::SharedLayoutContext;
-use euclid::{Matrix4D, Point2D, Radians, Rect, SideOffsets2D, Size2D};
+use euclid::{Point2D, Rect, SideOffsets2D, Size2D};
use flex::FlexFlow;
use flow::{BaseFlow, Flow, IS_ABSOLUTELY_POSITIONED};
use flow_ref::FlowRef;
@@ -31,7 +31,7 @@ use gfx_traits::{ScrollPolicy, ScrollRootId, StackingContextId};
use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFlow, LAST_FRAGMENT_OF_ELEMENT};
use ipc_channel::ipc;
use list_item::ListItemFlow;
-use model::{self, MaybeAuto, ToGfxMatrix};
+use model::{self, MaybeAuto};
use net_traits::image::base::PixelFormat;
use net_traits::image_cache_thread::UsePlaceholder;
use range::Range;
@@ -45,7 +45,7 @@ use std::sync::Arc;
use style::computed_values::{background_attachment, background_clip, background_origin};
use style::computed_values::{background_repeat, background_size, border_style};
use style::computed_values::{cursor, image_rendering, overflow_x};
-use style::computed_values::{pointer_events, position, transform, transform_style, visibility};
+use style::computed_values::{pointer_events, position, transform_style, visibility};
use style::computed_values::_servo_overflow_clip_box as overflow_clip_box;
use style::computed_values::filter::Filter;
use style::computed_values::text_shadow::TextShadow;
@@ -53,7 +53,7 @@ use style::logical_geometry::{LogicalPoint, LogicalRect, LogicalSize, WritingMod
use style::properties::{self, ServoComputedValues};
use style::properties::style_structs;
use style::servo::restyle_damage::REPAINT;
-use style::values::{self, Either, RGBA, computed};
+use style::values::{RGBA, computed};
use style::values::computed::{AngleOrCorner, Gradient, GradientKind, LengthOrPercentage, LengthOrPercentageOrAuto};
use style::values::specified::{HorizontalDirection, VerticalDirection};
use style_traits::cursor::Cursor;
@@ -326,23 +326,6 @@ impl<'a> DisplayListBuildState<'a> {
/// The logical width of an insertion point: at the moment, a one-pixel-wide line.
const INSERTION_POINT_LOGICAL_WIDTH: Au = Au(1 * AU_PER_PX);
-// TODO(gw): The transforms spec says that perspective length must
-// be positive. However, there is some confusion between the spec
-// and browser implementations as to handling the case of 0 for the
-// perspective value. Until the spec bug is resolved, at least ensure
-// that a provided perspective value of <= 0.0 doesn't cause panics
-// and behaves as it does in other browsers.
-// See https://lists.w3.org/Archives/Public/www-style/2016Jan/0020.html for more details.
-#[inline]
-fn create_perspective_matrix(d: Au) -> Matrix4D<f32> {
- let d = d.to_f32_px();
- if d <= 0.0 {
- Matrix4D::identity()
- } else {
- Matrix4D::create_perspective(d)
- }
-}
-
pub trait FragmentDisplayListBuilding {
/// Adds the display items necessary to paint the background of this fragment to the display
/// list if necessary.
@@ -498,9 +481,6 @@ pub trait FragmentDisplayListBuilding {
mode: StackingContextCreationMode,
parent_scroll_id: ScrollRootId)
-> StackingContext;
-
- /// Returns the 4D matrix representing this fragment's transform.
- fn transform_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Matrix4D<f32>;
}
fn handle_overlapping_radii(size: &Size2D<Au>, radii: &BorderRadii<Au>) -> BorderRadii<Au> {
@@ -1566,32 +1546,6 @@ impl FragmentDisplayListBuilding for Fragment {
// Then, using that, compute our overflow region relative to our border box.
let overflow = base_flow.overflow.paint.translate(&-border_box_offset);
- let transform = self.transform_matrix(&border_box);
- let perspective = match self.style().get_box().perspective {
- Either::First(length) => {
- let perspective_origin = self.style().get_box().perspective_origin;
- let perspective_origin =
- Point2D::new(model::specified(perspective_origin.horizontal,
- border_box.size.width).to_f32_px(),
- model::specified(perspective_origin.vertical,
- border_box.size.height).to_f32_px());
-
- let pre_transform = Matrix4D::create_translation(perspective_origin.x,
- perspective_origin.y,
- 0.0);
- let post_transform = Matrix4D::create_translation(-perspective_origin.x,
- -perspective_origin.y,
- 0.0);
-
- let perspective_matrix = create_perspective_matrix(length);
-
- pre_transform.pre_mul(&perspective_matrix).pre_mul(&post_transform)
- }
- Either::Second(values::None_) => {
- Matrix4D::identity()
- }
- };
-
// Create the filter pipeline.
let effects = self.style().get_effects();
let mut filters = effects.filter.clone();
@@ -1615,8 +1569,8 @@ impl FragmentDisplayListBuilding for Fragment {
self.effective_z_index(),
filters,
self.style().get_effects().mix_blend_mode,
- transform,
- perspective,
+ self.transform_matrix(&border_box),
+ self.perspective_matrix(&border_box),
establishes_3d_context,
scroll_policy,
parent_scroll_id)
@@ -1815,63 +1769,6 @@ impl FragmentDisplayListBuilding for Fragment {
}));
}
- fn transform_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Matrix4D<f32> {
- let mut transform = Matrix4D::identity();
- let operations = match self.style.get_box().transform.0 {
- None => return transform,
- Some(ref operations) => operations,
- };
-
- let transform_origin = &self.style.get_box().transform_origin;
- let transform_origin_x = model::specified(transform_origin.horizontal,
- stacking_relative_border_box.size
- .width).to_f32_px();
- let transform_origin_y = model::specified(transform_origin.vertical,
- stacking_relative_border_box.size
- .height).to_f32_px();
- let transform_origin_z = transform_origin.depth.to_f32_px();
-
- let pre_transform = Matrix4D::create_translation(transform_origin_x,
- transform_origin_y,
- transform_origin_z);
- let post_transform = Matrix4D::create_translation(-transform_origin_x,
- -transform_origin_y,
- -transform_origin_z);
-
- for operation in operations {
- let matrix = match *operation {
- transform::ComputedOperation::Rotate(ax, ay, az, theta) => {
- let theta = 2.0f32 * f32::consts::PI - theta.radians();
- Matrix4D::create_rotation(ax, ay, az, Radians::new(theta))
- }
- transform::ComputedOperation::Perspective(d) => {
- create_perspective_matrix(d)
- }
- transform::ComputedOperation::Scale(sx, sy, sz) => {
- Matrix4D::create_scale(sx, sy, sz)
- }
- transform::ComputedOperation::Translate(tx, ty, tz) => {
- let tx =
- model::specified(tx, stacking_relative_border_box.size.width).to_f32_px();
- let ty =
- model::specified(ty, stacking_relative_border_box.size.height).to_f32_px();
- let tz = tz.to_f32_px();
- Matrix4D::create_translation(tx, ty, tz)
- }
- transform::ComputedOperation::Matrix(m) => {
- m.to_gfx_matrix()
- }
- transform::ComputedOperation::Skew(theta_x, theta_y) => {
- Matrix4D::create_skew(Radians::new(theta_x.radians()),
- Radians::new(theta_y.radians()))
- }
- };
-
- transform = transform.pre_mul(&matrix);
- }
-
- pre_transform.pre_mul(&transform).pre_mul(&post_transform)
- }
}
pub trait BlockFlowDisplayListBuilding {
@@ -1881,16 +1778,6 @@ pub trait BlockFlowDisplayListBuilding {
fn build_display_list_for_block(&mut self,
state: &mut DisplayListBuildState,
border_painting_mode: BorderPaintingMode);
-
- /// Changes this block's clipping region from its parent's coordinate system to its own
- /// coordinate system if necessary (i.e. if this block is a stacking context).
- ///
- /// The clipping region is initially in each block's parent's coordinate system because the
- /// parent of each block does not have enough information to determine what the child's
- /// coordinate system is on its own. Specifically, if the child is absolutely positioned, the
- /// parent does not know where the child's absolute position is at the time it assigns clipping
- /// regions, because flows compute their own absolute positions.
- fn switch_coordinate_system_if_necessary(&mut self);
}
impl BlockFlowDisplayListBuilding for BlockFlow {
@@ -2024,55 +1911,6 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
state.processing_scroll_root_element = false;
}
- fn switch_coordinate_system_if_necessary(&mut self) {
- // Avoid overflows!
- if self.base.clip.is_max() {
- return
- }
-
- if !self.fragment.establishes_stacking_context() {
- return
- }
-
- let stacking_relative_border_box =
- self.fragment.stacking_relative_border_box(&self.base.stacking_relative_position,
- &self.base
- .early_absolute_position_info
- .relative_containing_block_size,
- self.base
- .early_absolute_position_info
- .relative_containing_block_mode,
- CoordinateSystem::Parent);
- self.base.clip = self.base.clip.translate(&-stacking_relative_border_box.origin);
-
- // Account for `transform`, if applicable.
- if self.fragment.style.get_box().transform.0.is_none() {
- return
- }
- let transform = match self.fragment
- .transform_matrix(&stacking_relative_border_box)
- .inverse() {
- Some(transform) => transform,
- None => {
- // Singular matrix. Ignore it.
- return
- }
- };
-
- // FIXME(pcwalton): This is inaccurate: not all transforms are 2D, and not all clips are
- // axis-aligned.
- let bounding_rect = self.base.clip.bounding_rect();
- let bounding_rect = Rect::new(Point2D::new(bounding_rect.origin.x.to_f32_px(),
- bounding_rect.origin.y.to_f32_px()),
- Size2D::new(bounding_rect.size.width.to_f32_px(),
- bounding_rect.size.height.to_f32_px()));
- let clip_rect = transform.to_2d().transform_rect(&bounding_rect);
- let clip_rect = Rect::new(Point2D::new(Au::from_f32_px(clip_rect.origin.x),
- Au::from_f32_px(clip_rect.origin.y)),
- Size2D::new(Au::from_f32_px(clip_rect.size.width),
- Au::from_f32_px(clip_rect.size.height)));
- self.base.clip = ClippingRegion::from_rect(&clip_rect)
- }
}
pub trait InlineFlowDisplayListBuilding {
diff --git a/components/layout/fragment.rs b/components/layout/fragment.rs
index 873993f8982..69a6fde214d 100644
--- a/components/layout/fragment.rs
+++ b/components/layout/fragment.rs
@@ -9,7 +9,7 @@
use app_units::Au;
use canvas_traits::CanvasMsg;
use context::{LayoutContext, SharedLayoutContext};
-use euclid::{Point2D, Rect, Size2D};
+use euclid::{Matrix4D, Point2D, Radians, Rect, Size2D};
use floats::ClearType;
use flow::{self, ImmutableFlowUtils};
use flow_ref::FlowRef;
@@ -24,7 +24,7 @@ use ipc_channel::ipc::IpcSender;
#[cfg(debug_assertions)]
use layout_debug;
use model::{self, IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto, SizeConstraint};
-use model::style_length;
+use model::{style_length, ToGfxMatrix};
use msg::constellation_msg::PipelineId;
use net_traits::image::base::{Image, ImageMetadata};
use net_traits::image_cache_thread::{ImageOrMetadataAvailable, UsePlaceholder};
@@ -34,14 +34,14 @@ use script_layout_interface::SVGSVGData;
use script_layout_interface::wrapper_traits::{PseudoElementType, ThreadSafeLayoutElement, ThreadSafeLayoutNode};
use serde::{Serialize, Serializer};
use servo_url::ServoUrl;
+use std::{f32, fmt};
use std::borrow::ToOwned;
use std::cmp::{Ordering, max, min};
use std::collections::LinkedList;
-use std::fmt;
use std::sync::{Arc, Mutex};
use style::arc_ptr_eq;
use style::computed_values::{border_collapse, box_sizing, clear, color, display, mix_blend_mode};
-use style::computed_values::{overflow_wrap, overflow_x, position, text_decoration};
+use style::computed_values::{overflow_wrap, overflow_x, position, text_decoration, transform};
use style::computed_values::{transform_style, vertical_align, white_space, word_break, z_index};
use style::computed_values::content::ContentItem;
use style::logical_geometry::{Direction, LogicalMargin, LogicalRect, LogicalSize, WritingMode};
@@ -49,7 +49,7 @@ use style::properties::ServoComputedValues;
use style::selector_parser::RestyleDamage;
use style::servo::restyle_damage::RECONSTRUCT_FLOW;
use style::str::char_is_whitespace;
-use style::values::Either;
+use style::values::{self, Either};
use style::values::computed::{LengthOrPercentage, LengthOrPercentageOrAuto};
use text;
use text::TextRunScanner;
@@ -2737,6 +2737,93 @@ impl Fragment {
SpecificFragmentInfo::UnscannedText(_) => true
}
}
+
+ /// Returns the 4D matrix representing this fragment's transform.
+ pub fn transform_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Matrix4D<f32> {
+ let mut transform = Matrix4D::identity();
+ let operations = match self.style.get_box().transform.0 {
+ None => return transform,
+ Some(ref operations) => operations,
+ };
+
+ let transform_origin = &self.style.get_box().transform_origin;
+ let transform_origin_x = model::specified(transform_origin.horizontal,
+ stacking_relative_border_box.size
+ .width).to_f32_px();
+ let transform_origin_y = model::specified(transform_origin.vertical,
+ stacking_relative_border_box.size
+ .height).to_f32_px();
+ let transform_origin_z = transform_origin.depth.to_f32_px();
+
+ let pre_transform = Matrix4D::create_translation(transform_origin_x,
+ transform_origin_y,
+ transform_origin_z);
+ let post_transform = Matrix4D::create_translation(-transform_origin_x,
+ -transform_origin_y,
+ -transform_origin_z);
+
+ for operation in operations {
+ let matrix = match *operation {
+ transform::ComputedOperation::Rotate(ax, ay, az, theta) => {
+ let theta = 2.0f32 * f32::consts::PI - theta.radians();
+ Matrix4D::create_rotation(ax, ay, az, Radians::new(theta))
+ }
+ transform::ComputedOperation::Perspective(d) => {
+ create_perspective_matrix(d)
+ }
+ transform::ComputedOperation::Scale(sx, sy, sz) => {
+ Matrix4D::create_scale(sx, sy, sz)
+ }
+ transform::ComputedOperation::Translate(tx, ty, tz) => {
+ let tx =
+ model::specified(tx, stacking_relative_border_box.size.width).to_f32_px();
+ let ty =
+ model::specified(ty, stacking_relative_border_box.size.height).to_f32_px();
+ let tz = tz.to_f32_px();
+ Matrix4D::create_translation(tx, ty, tz)
+ }
+ transform::ComputedOperation::Matrix(m) => {
+ m.to_gfx_matrix()
+ }
+ transform::ComputedOperation::Skew(theta_x, theta_y) => {
+ Matrix4D::create_skew(Radians::new(theta_x.radians()),
+ Radians::new(theta_y.radians()))
+ }
+ };
+
+ transform = transform.pre_mul(&matrix);
+ }
+
+ pre_transform.pre_mul(&transform).pre_mul(&post_transform)
+ }
+
+ /// Returns the 4D matrix representing this fragment's perspective.
+ pub fn perspective_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Matrix4D<f32> {
+ match self.style().get_box().perspective {
+ Either::First(length) => {
+ let perspective_origin = self.style().get_box().perspective_origin;
+ let perspective_origin =
+ Point2D::new(model::specified(perspective_origin.horizontal,
+ stacking_relative_border_box.size.width).to_f32_px(),
+ model::specified(perspective_origin.vertical,
+ stacking_relative_border_box.size.height).to_f32_px());
+
+ let pre_transform = Matrix4D::create_translation(perspective_origin.x,
+ perspective_origin.y,
+ 0.0);
+ let post_transform = Matrix4D::create_translation(-perspective_origin.x,
+ -perspective_origin.y,
+ 0.0);
+
+ let perspective_matrix = create_perspective_matrix(length);
+
+ pre_transform.pre_mul(&perspective_matrix).pre_mul(&post_transform)
+ }
+ Either::Second(values::None_) => {
+ Matrix4D::identity()
+ }
+ }
+ }
}
impl fmt::Debug for Fragment {
@@ -2970,3 +3057,20 @@ impl Serialize for DebugId {
serializer.serialize_u16(self.0)
}
}
+
+// TODO(gw): The transforms spec says that perspective length must
+// be positive. However, there is some confusion between the spec
+// and browser implementations as to handling the case of 0 for the
+// perspective value. Until the spec bug is resolved, at least ensure
+// that a provided perspective value of <= 0.0 doesn't cause panics
+// and behaves as it does in other browsers.
+// See https://lists.w3.org/Archives/Public/www-style/2016Jan/0020.html for more details.
+#[inline]
+fn create_perspective_matrix(d: Au) -> Matrix4D<f32> {
+ let d = d.to_f32_px();
+ if d <= 0.0 {
+ Matrix4D::identity()
+ } else {
+ Matrix4D::create_perspective(d)
+ }
+}
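For reference, both helpers moved or added here apply their effect about its origin in the usual CSS fashion, conceptually

    M = translate(origin) * M_effect * translate(-origin)

where M_effect is the composed transform list for transform_matrix and the projection from create_perspective_matrix for perspective_matrix; per the comment above, the perspective helper falls back to the identity for lengths <= 0.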
diff --git a/components/profile/time.rs b/components/profile/time.rs
index e02de039360..be3be16655f 100644
--- a/components/profile/time.rs
+++ b/components/profile/time.rs
@@ -151,6 +151,7 @@ impl Formattable for ProfilerCategory {
ProfilerCategory::ScriptServiceWorkerEvent => "Script Service Worker Event",
ProfilerCategory::ScriptEnterFullscreen => "Script Enter Fullscreen",
ProfilerCategory::ScriptExitFullscreen => "Script Exit Fullscreen",
+ ProfilerCategory::ScriptWebVREvent => "Script WebVR Event",
ProfilerCategory::ApplicationHeartbeat => "Application Heartbeat",
};
format!("{}{}", padding, name)
diff --git a/components/profile_traits/time.rs b/components/profile_traits/time.rs
index f7b62e17139..7bbebf5f465 100644
--- a/components/profile_traits/time.rs
+++ b/components/profile_traits/time.rs
@@ -88,6 +88,7 @@ pub enum ProfilerCategory {
ScriptParseXML = 0x76,
ScriptEnterFullscreen = 0x77,
ScriptExitFullscreen = 0x78,
+ ScriptWebVREvent = 0x79,
ApplicationHeartbeat = 0x90,
}
diff --git a/components/script/Cargo.toml b/components/script/Cargo.toml
index 74397e07138..3ef402a4605 100644
--- a/components/script/Cargo.toml
+++ b/components/script/Cargo.toml
@@ -82,6 +82,8 @@ url = {version = "1.2", features = ["heap_size", "query_encoding"]}
uuid = {version = "0.3.1", features = ["v4"]}
websocket = "0.17"
xml5ever = {version = "0.3.1", features = ["unstable"]}
+webvr = {path = "../webvr"}
+webvr_traits = {path = "../webvr_traits"}
[dependencies.webrender_traits]
git = "https://github.com/servo/webrender"
diff --git a/components/script/dom/bindings/conversions.rs b/components/script/dom/bindings/conversions.rs
index 18a2deb8f0f..577cecb2fd4 100644
--- a/components/script/dom/bindings/conversions.rs
+++ b/components/script/dom/bindings/conversions.rs
@@ -51,6 +51,9 @@ use js::jsapi::{JSObject, JSString, JS_GetArrayBufferViewType};
use js::jsapi::{JS_GetLatin1StringCharsAndLength, JS_GetObjectAsArrayBuffer, JS_GetObjectAsArrayBufferView};
use js::jsapi::{JS_GetReservedSlot, JS_GetTwoByteStringCharsAndLength};
use js::jsapi::{JS_IsArrayObject, JS_NewStringCopyN, JS_StringHasLatin1Chars};
+use js::jsapi::{JS_NewFloat32Array, JS_NewFloat64Array};
+use js::jsapi::{JS_NewInt8Array, JS_NewInt16Array, JS_NewInt32Array};
+use js::jsapi::{JS_NewUint8Array, JS_NewUint16Array, JS_NewUint32Array};
use js::jsapi::{MutableHandleValue, Type};
use js::jsval::{ObjectValue, StringValue};
use js::rust::{ToString, get_object_class, is_dom_class, is_dom_object, maybe_wrap_value};
@@ -463,6 +466,9 @@ pub unsafe trait ArrayBufferViewContents: Clone {
/// Check if the JS ArrayBufferView type is compatible with the implementor of the
/// trait
fn is_type_compatible(ty: Type) -> bool;
+
+ /// Creates a typed array
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject;
}
unsafe impl ArrayBufferViewContents for u8 {
@@ -473,47 +479,79 @@ unsafe impl ArrayBufferViewContents for u8 {
_ => false,
}
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewUint8Array(cx, num)
+ }
}
unsafe impl ArrayBufferViewContents for i8 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int8 as i32
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewInt8Array(cx, num)
+ }
}
unsafe impl ArrayBufferViewContents for u16 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Uint16 as i32
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewUint16Array(cx, num)
+ }
}
unsafe impl ArrayBufferViewContents for i16 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int16 as i32
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewInt16Array(cx, num)
+ }
}
unsafe impl ArrayBufferViewContents for u32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Uint32 as i32
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewUint32Array(cx, num)
+ }
}
unsafe impl ArrayBufferViewContents for i32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Int32 as i32
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewInt32Array(cx, num)
+ }
}
unsafe impl ArrayBufferViewContents for f32 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Float32 as i32
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewFloat32Array(cx, num)
+ }
}
unsafe impl ArrayBufferViewContents for f64 {
fn is_type_compatible(ty: Type) -> bool {
ty as i32 == Type::Float64 as i32
}
+
+ unsafe fn new(cx: *mut JSContext, num: u32) -> *mut JSObject {
+ JS_NewFloat64Array(cx, num)
+ }
}
/// Returns a mutable slice of the Array Buffer View data, viewed as T, without
@@ -595,3 +633,23 @@ pub unsafe fn is_array_like(cx: *mut JSContext, value: HandleValue) -> bool {
assert!(JS_IsArrayObject(cx, value, &mut result));
result
}
+
+/// Creates a typed JS array from a Rust slice
+pub unsafe fn slice_to_array_buffer_view<T>(cx: *mut JSContext, data: &[T]) -> *mut JSObject
+ where T: ArrayBufferViewContents
+{
+ let js_object = T::new(cx, data.len() as u32);
+ assert!(!js_object.is_null());
+ update_array_buffer_view(js_object, data);
+ js_object
+}
+
+/// Updates a typed JS array from a Rust slice
+pub unsafe fn update_array_buffer_view<T>(obj: *mut JSObject, data: &[T])
+ where T: ArrayBufferViewContents
+{
+ let mut buffer = array_buffer_view_data(obj);
+ if let Some(ref mut buffer) = buffer {
+ ptr::copy_nonoverlapping(&data[0], &mut buffer[0], data.len())
+ }
+}
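The two new helpers pair naturally: slice_to_array_buffer_view allocates a typed array whose element type matches the slice and copies the data in, while update_array_buffer_view overwrites an existing array's backing store in place (handy for per-frame VR pose data). An illustrative sketch from inside the script crate, assuming `cx` is a valid *mut JSContext for the current realm; the function name is hypothetical:

    use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
    use js::jsapi::{JSContext, JSObject};

    // Builds a Float32Array from a pose position and later refreshes it in place.
    unsafe fn make_position_array(cx: *mut JSContext) -> *mut JSObject {
        let pose: [f32; 3] = [0.0, 1.6, 0.0];
        let array = slice_to_array_buffer_view(cx, &pose[..]);
        let next_pose: [f32; 3] = [0.0, 1.7, -0.5];
        update_array_buffer_view(array, &next_pose[..]);
        array
    }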
diff --git a/components/script/dom/mod.rs b/components/script/dom/mod.rs
index 4f94ac8309f..55b56977cd9 100644
--- a/components/script/dom/mod.rs
+++ b/components/script/dom/mod.rs
@@ -429,6 +429,15 @@ pub mod validation;
pub mod validitystate;
pub mod values;
pub mod virtualmethods;
+pub mod vr;
+pub mod vrdisplay;
+pub mod vrdisplaycapabilities;
+pub mod vrdisplayevent;
+pub mod vreyeparameters;
+pub mod vrfieldofview;
+pub mod vrframedata;
+pub mod vrpose;
+pub mod vrstageparameters;
pub mod webgl_validations;
pub mod webglactiveinfo;
pub mod webglbuffer;
diff --git a/components/script/dom/navigator.rs b/components/script/dom/navigator.rs
index b7d781f5eb0..dd9a1a789c6 100644
--- a/components/script/dom/navigator.rs
+++ b/components/script/dom/navigator.rs
@@ -12,7 +12,9 @@ use dom::mimetypearray::MimeTypeArray;
use dom::navigatorinfo;
use dom::pluginarray::PluginArray;
use dom::serviceworkercontainer::ServiceWorkerContainer;
+use dom::vr::VR;
use dom::window::Window;
+use script_traits::WebVREventMsg;
#[dom_struct]
pub struct Navigator {
@@ -21,6 +23,7 @@ pub struct Navigator {
plugins: MutNullableJS<PluginArray>,
mime_types: MutNullableJS<MimeTypeArray>,
service_worker: MutNullableJS<ServiceWorkerContainer>,
+ vr: MutNullableJS<VR>
}
impl Navigator {
@@ -31,6 +34,7 @@ impl Navigator {
plugins: Default::default(),
mime_types: Default::default(),
service_worker: Default::default(),
+ vr: Default::default(),
}
}
@@ -114,4 +118,16 @@ impl NavigatorMethods for Navigator {
true
}
+ #[allow(unrooted_must_root)]
+ // https://w3c.github.io/webvr/#interface-navigator
+ fn Vr(&self) -> Root<VR> {
+ self.vr.or_init(|| VR::new(&self.global()))
+ }
+}
+
+impl Navigator {
+ pub fn handle_webvr_event(&self, event: WebVREventMsg) {
+ self.vr.get().expect("Shouldn't arrive here with an empty VR instance")
+ .handle_webvr_event(event);
+ }
}
diff --git a/components/script/dom/vr.rs b/components/script/dom/vr.rs
new file mode 100644
index 00000000000..6e077c415ac
--- /dev/null
+++ b/components/script/dom/vr.rs
@@ -0,0 +1,160 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::VRBinding;
+use dom::bindings::codegen::Bindings::VRBinding::VRMethods;
+use dom::bindings::error::Error;
+use dom::bindings::inheritance::Castable;
+use dom::bindings::js::{JS, Root};
+use dom::bindings::reflector::{DomObject, reflect_dom_object};
+use dom::event::Event;
+use dom::eventtarget::EventTarget;
+use dom::globalscope::GlobalScope;
+use dom::promise::Promise;
+use dom::vrdisplay::VRDisplay;
+use dom::vrdisplayevent::VRDisplayEvent;
+use ipc_channel::ipc;
+use ipc_channel::ipc::IpcSender;
+use script_traits::WebVREventMsg;
+use std::rc::Rc;
+use webvr_traits::WebVRMsg;
+use webvr_traits::webvr;
+
+#[dom_struct]
+pub struct VR {
+ eventtarget: EventTarget,
+ displays: DOMRefCell<Vec<JS<VRDisplay>>>
+}
+
+impl VR {
+ fn new_inherited() -> VR {
+ VR {
+ eventtarget: EventTarget::new_inherited(),
+ displays: DOMRefCell::new(Vec::new())
+ }
+ }
+
+ pub fn new(global: &GlobalScope) -> Root<VR> {
+ let root = reflect_dom_object(box VR::new_inherited(),
+ global,
+ VRBinding::Wrap);
+ root.register();
+ root
+ }
+}
+
+impl Drop for VR {
+ fn drop(&mut self) {
+ self.unregister();
+ }
+}
+
+impl VRMethods for VR {
+ #[allow(unrooted_must_root)]
+ // https://w3c.github.io/webvr/#interface-navigator
+ fn GetDisplays(&self) -> Rc<Promise> {
+ let promise = Promise::new(&self.global());
+
+ if let Some(webvr_thread) = self.webvr_thread() {
+ let (sender, receiver) = ipc::channel().unwrap();
+ webvr_thread.send(WebVRMsg::GetDisplays(sender)).unwrap();
+ match receiver.recv().unwrap() {
+ Ok(displays) => {
+ // Sync displays
+ for display in displays {
+ self.sync_display(&display);
+ }
+ },
+ Err(e) => {
+ promise.reject_native(promise.global().get_cx(), &e);
+ return promise;
+ }
+ }
+ } else {
+ // WebVR spec: The Promise MUST be rejected if WebVR is not enabled/supported.
+ promise.reject_error(promise.global().get_cx(), Error::Security);
+ return promise;
+ }
+
+ // convert from JS to Root
+ let displays: Vec<Root<VRDisplay>> = self.displays.borrow().iter()
+ .map(|d| Root::from_ref(&**d))
+ .collect();
+ promise.resolve_native(promise.global().get_cx(), &displays);
+
+ promise
+ }
+}
+
+
+impl VR {
+ fn webvr_thread(&self) -> Option<IpcSender<WebVRMsg>> {
+ self.global().as_window().webvr_thread()
+ }
+
+ fn find_display(&self, display_id: u64) -> Option<Root<VRDisplay>> {
+ self.displays.borrow()
+ .iter()
+ .find(|d| d.get_display_id() == display_id)
+ .map(|d| Root::from_ref(&**d))
+ }
+
+ fn register(&self) {
+ if let Some(webvr_thread) = self.webvr_thread() {
+ let msg = WebVRMsg::RegisterContext(self.global().pipeline_id());
+ webvr_thread.send(msg).unwrap();
+ }
+ }
+
+ fn unregister(&self) {
+ if let Some(webvr_thread) = self.webvr_thread() {
+ let msg = WebVRMsg::UnregisterContext(self.global().pipeline_id());
+ webvr_thread.send(msg).unwrap();
+ }
+ }
+
+ fn sync_display(&self, display: &webvr::VRDisplayData) -> Root<VRDisplay> {
+ if let Some(existing) = self.find_display(display.display_id) {
+ existing.update_display(&display);
+ existing
+ } else {
+ let root = VRDisplay::new(&self.global(), display.clone());
+ self.displays.borrow_mut().push(JS::from_ref(&*root));
+ root
+ }
+ }
+
+ pub fn handle_webvr_event(&self, event: WebVREventMsg) {
+ let WebVREventMsg::DisplayEvent(event) = event;
+ match &event {
+ &webvr::VRDisplayEvent::Connect(ref display) => {
+ let display = self.sync_display(&display);
+ display.handle_webvr_event(&event);
+ self.notify_event(&display, &event);
+ },
+ &webvr::VRDisplayEvent::Disconnect(id) => {
+ if let Some(display) = self.find_display(id) {
+ display.handle_webvr_event(&event);
+ self.notify_event(&display, &event);
+ }
+ },
+ &webvr::VRDisplayEvent::Activate(ref display, _) |
+ &webvr::VRDisplayEvent::Deactivate(ref display, _) |
+ &webvr::VRDisplayEvent::Blur(ref display) |
+ &webvr::VRDisplayEvent::Focus(ref display) |
+ &webvr::VRDisplayEvent::PresentChange(ref display, _) |
+ &webvr::VRDisplayEvent::Change(ref display) => {
+ let display = self.sync_display(&display);
+ display.handle_webvr_event(&event);
+ }
+ };
+ }
+
+ fn notify_event(&self, display: &VRDisplay, event: &webvr::VRDisplayEvent) {
+ let event = VRDisplayEvent::new_from_webvr(&self.global(), &display, &event);
+ event.upcast::<Event>().fire(self.upcast());
+ }
+}
+
diff --git a/components/script/dom/vrdisplay.rs b/components/script/dom/vrdisplay.rs
new file mode 100644
index 00000000000..bb7d1811464
--- /dev/null
+++ b/components/script/dom/vrdisplay.rs
@@ -0,0 +1,607 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use canvas_traits::CanvasMsg;
+use core::ops::Deref;
+use dom::bindings::callback::ExceptionHandling;
+use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceBinding::PerformanceMethods;
+use dom::bindings::codegen::Bindings::VRDisplayBinding;
+use dom::bindings::codegen::Bindings::VRDisplayBinding::VRDisplayMethods;
+use dom::bindings::codegen::Bindings::VRDisplayBinding::VREye;
+use dom::bindings::codegen::Bindings::VRLayerBinding::VRLayer;
+use dom::bindings::codegen::Bindings::WindowBinding::FrameRequestCallback;
+use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
+use dom::bindings::inheritance::Castable;
+use dom::bindings::js::{MutNullableJS, MutJS, Root};
+use dom::bindings::num::Finite;
+use dom::bindings::refcounted::Trusted;
+use dom::bindings::reflector::{DomObject, reflect_dom_object};
+use dom::bindings::str::DOMString;
+use dom::event::Event;
+use dom::eventtarget::EventTarget;
+use dom::globalscope::GlobalScope;
+use dom::promise::Promise;
+use dom::vrdisplaycapabilities::VRDisplayCapabilities;
+use dom::vrdisplayevent::VRDisplayEvent;
+use dom::vreyeparameters::VREyeParameters;
+use dom::vrframedata::VRFrameData;
+use dom::vrpose::VRPose;
+use dom::vrstageparameters::VRStageParameters;
+use dom::webglrenderingcontext::WebGLRenderingContext;
+use ipc_channel::ipc;
+use ipc_channel::ipc::{IpcSender, IpcReceiver};
+use js::jsapi::JSContext;
+use script_runtime::CommonScriptMsg;
+use script_runtime::ScriptThreadEventCategory::WebVREvent;
+use script_thread::Runnable;
+use std::cell::Cell;
+use std::mem;
+use std::rc::Rc;
+use std::sync::mpsc;
+use std::thread;
+use webrender_traits::VRCompositorCommand;
+use webvr_traits::{WebVRDisplayData, WebVRDisplayEvent, WebVRFrameData, WebVRLayer, WebVRMsg};
+
+#[dom_struct]
+pub struct VRDisplay {
+ eventtarget: EventTarget,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ display: DOMRefCell<WebVRDisplayData>,
+ depth_near: Cell<f64>,
+ depth_far: Cell<f64>,
+ presenting: Cell<bool>,
+ left_eye_params: MutJS<VREyeParameters>,
+ right_eye_params: MutJS<VREyeParameters>,
+ capabilities: MutJS<VRDisplayCapabilities>,
+ stage_params: MutNullableJS<VRStageParameters>,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ frame_data: DOMRefCell<WebVRFrameData>,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ layer: DOMRefCell<WebVRLayer>,
+ layer_ctx: MutNullableJS<WebGLRenderingContext>,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ next_raf_id: Cell<u32>,
+ /// List of request animation frame callbacks
+ #[ignore_heap_size_of = "closures are hard"]
+ raf_callback_list: DOMRefCell<Vec<(u32, Option<Rc<FrameRequestCallback>>)>>,
+ // Compositor VRFrameData synchronization
+ frame_data_status: Cell<VRFrameDataStatus>,
+ #[ignore_heap_size_of = "channels are hard"]
+ frame_data_receiver: DOMRefCell<Option<IpcReceiver<Result<Vec<u8>, ()>>>>,
+}
+
+unsafe_no_jsmanaged_fields!(WebVRDisplayData);
+unsafe_no_jsmanaged_fields!(WebVRFrameData);
+unsafe_no_jsmanaged_fields!(WebVRLayer);
+
+#[derive(Clone, Copy, PartialEq, Eq, HeapSizeOf)]
+enum VRFrameDataStatus {
+ Waiting,
+ Synced,
+ Exit
+}
+
+unsafe_no_jsmanaged_fields!(VRFrameDataStatus);
+
+impl VRDisplay {
+ fn new_inherited(global: &GlobalScope, display: WebVRDisplayData) -> VRDisplay {
+ let stage = match display.stage_parameters {
+ Some(ref params) => Some(VRStageParameters::new(params.clone(), &global)),
+ None => None
+ };
+
+ VRDisplay {
+ eventtarget: EventTarget::new_inherited(),
+ display: DOMRefCell::new(display.clone()),
+ depth_near: Cell::new(0.01),
+ depth_far: Cell::new(10000.0),
+ presenting: Cell::new(false),
+ left_eye_params: MutJS::new(&*VREyeParameters::new(display.left_eye_parameters.clone(), &global)),
+ right_eye_params: MutJS::new(&*VREyeParameters::new(display.right_eye_parameters.clone(), &global)),
+ capabilities: MutJS::new(&*VRDisplayCapabilities::new(display.capabilities.clone(), &global)),
+ stage_params: MutNullableJS::new(stage.as_ref().map(|v| v.deref())),
+ frame_data: DOMRefCell::new(Default::default()),
+ layer: DOMRefCell::new(Default::default()),
+ layer_ctx: MutNullableJS::default(),
+ next_raf_id: Cell::new(1),
+ raf_callback_list: DOMRefCell::new(vec![]),
+ frame_data_status: Cell::new(VRFrameDataStatus::Waiting),
+ frame_data_receiver: DOMRefCell::new(None),
+ }
+ }
+
+ pub fn new(global: &GlobalScope, display: WebVRDisplayData) -> Root<VRDisplay> {
+ reflect_dom_object(box VRDisplay::new_inherited(&global, display),
+ global,
+ VRDisplayBinding::Wrap)
+ }
+}
+
+impl Drop for VRDisplay {
+ fn drop(&mut self) {
+ if self.presenting.get() {
+ self.force_stop_present();
+ }
+ }
+}
+
+impl VRDisplayMethods for VRDisplay {
+ // https://w3c.github.io/webvr/#dom-vrdisplay-isconnected
+ fn IsConnected(&self) -> bool {
+ self.display.borrow().connected
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-ispresenting
+ fn IsPresenting(&self) -> bool {
+ self.presenting.get()
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-capabilities
+ fn Capabilities(&self) -> Root<VRDisplayCapabilities> {
+ Root::from_ref(&*self.capabilities.get())
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-stageparameters
+ fn GetStageParameters(&self) -> Option<Root<VRStageParameters>> {
+ self.stage_params.get().map(|s| Root::from_ref(&*s))
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-geteyeparameters
+ fn GetEyeParameters(&self, eye: VREye) -> Root<VREyeParameters> {
+ match eye {
+ VREye::Left => Root::from_ref(&*self.left_eye_params.get()),
+ VREye::Right => Root::from_ref(&*self.right_eye_params.get())
+ }
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-displayid
+ fn DisplayId(&self) -> u32 {
+ self.display.borrow().display_id as u32
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-displayname
+ fn DisplayName(&self) -> DOMString {
+ DOMString::from(self.display.borrow().display_name.clone())
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-getframedata-framedata-framedata
+ fn GetFrameData(&self, frameData: &VRFrameData) -> bool {
+ // If presenting, we use frame data synced with the compositor for the whole frame
+ if self.presenting.get() {
+ if self.frame_data_status.get() == VRFrameDataStatus::Waiting {
+ self.sync_frame_data();
+ }
+ frameData.update(&self.frame_data.borrow());
+ return true;
+ }
+
+ // If not presenting, we fetch the VRFrameData immediately
+ let (sender, receiver) = ipc::channel().unwrap();
+ self.webvr_thread().send(WebVRMsg::GetFrameData(self.global().pipeline_id(),
+ self.get_display_id(),
+ self.depth_near.get(),
+ self.depth_far.get(),
+ sender)).unwrap();
+ return match receiver.recv().unwrap() {
+ Ok(data) => {
+ frameData.update(&data);
+ true
+ },
+ Err(e) => {
+ error!("WebVR::GetFrameData: {:?}", e);
+ false
+ }
+ };
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-getpose
+ fn GetPose(&self) -> Root<VRPose> {
+ VRPose::new(&self.global(), &self.frame_data.borrow().pose)
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-resetpose
+ fn ResetPose(&self) -> () {
+ let (sender, receiver) = ipc::channel().unwrap();
+ self.webvr_thread().send(WebVRMsg::ResetPose(self.global().pipeline_id(),
+ self.get_display_id(),
+ sender)).unwrap();
+ if let Ok(data) = receiver.recv().unwrap() {
+ // Some VRDisplay data might change after calling ResetPose()
+ *self.display.borrow_mut() = data;
+ }
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-depthnear
+ fn DepthNear(&self) -> Finite<f64> {
+ Finite::wrap(self.depth_near.get())
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-depthnear
+ fn SetDepthNear(&self, value: Finite<f64>) -> () {
+ self.depth_near.set(*value.deref());
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-depthfar
+ fn DepthFar(&self) -> Finite<f64> {
+ Finite::wrap(self.depth_far.get())
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-depthfar
+ fn SetDepthFar(&self, value: Finite<f64>) -> () {
+ self.depth_far.set(*value.deref());
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-requestanimationframe
+ fn RequestAnimationFrame(&self, callback: Rc<FrameRequestCallback>) -> u32 {
+ if self.presenting.get() {
+ let raf_id = self.next_raf_id.get();
+ self.next_raf_id.set(raf_id + 1);
+ self.raf_callback_list.borrow_mut().push((raf_id, Some(callback)));
+ raf_id
+ } else {
+ // WebVR spec: When a VRDisplay is not presenting it should
+ // fall back to window.requestAnimationFrame.
+ self.global().as_window().RequestAnimationFrame(callback)
+ }
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-cancelanimationframe
+ fn CancelAnimationFrame(&self, handle: u32) -> () {
+ if self.presenting.get() {
+ let mut list = self.raf_callback_list.borrow_mut();
+ if let Some(mut pair) = list.iter_mut().find(|pair| pair.0 == handle) {
+ pair.1 = None;
+ }
+ } else {
+ // WebVR spec: When a VRDisplay is not presenting it should
+ // fall back to window.cancelAnimationFrame.
+ self.global().as_window().CancelAnimationFrame(handle);
+ }
+ }
+
+ #[allow(unrooted_must_root)]
+ // https://w3c.github.io/webvr/#dom-vrdisplay-requestpresent
+ fn RequestPresent(&self, layers: Vec<VRLayer>) -> Rc<Promise> {
+ let promise = Promise::new(&self.global());
+ // TODO: WebVR spec: this method must be called in response to a user gesture
+
+ // WebVR spec: If canPresent is false the promise MUST be rejected
+ if !self.display.borrow().capabilities.can_present {
+ let msg = "VRDisplay canPresent is false".to_string();
+ promise.reject_native(promise.global().get_cx(), &msg);
+ return promise;
+ }
+
+ // The current WebVR spec only allows 1 VRLayer if the VRDevice can present.
+ // Future revisions of this spec may allow multiple layers to enable more complex rendering effects
+ // such as compositing WebGL and DOM elements together.
+ // That functionality is not allowed by this revision of the spec.
+ if layers.len() != 1 {
+ let msg = "The number of layers must be 1".to_string();
+ promise.reject_native(promise.global().get_cx(), &msg);
+ return promise;
+ }
+
+ // Parse and validate received VRLayer
+ let layer = validate_layer(self.global().get_cx(), &layers[0]);
+
+ let layer_bounds;
+ let layer_ctx;
+
+ match layer {
+ Ok((bounds, ctx)) => {
+ layer_bounds = bounds;
+ layer_ctx = ctx;
+ },
+ Err(msg) => {
+ let msg = msg.to_string();
+ promise.reject_native(promise.global().get_cx(), &msg);
+ return promise;
+ }
+ };
+
+ // WebVR spec: Repeat calls while already presenting will update the VRLayers being displayed.
+ if self.presenting.get() {
+ *self.layer.borrow_mut() = layer_bounds;
+ self.layer_ctx.set(Some(&layer_ctx));
+ promise.resolve_native(promise.global().get_cx(), &());
+ return promise;
+ }
+
+ // Request Present
+ let (sender, receiver) = ipc::channel().unwrap();
+ self.webvr_thread().send(WebVRMsg::RequestPresent(self.global().pipeline_id(),
+ self.display.borrow().display_id,
+ sender))
+ .unwrap();
+ match receiver.recv().unwrap() {
+ Ok(()) => {
+ *self.layer.borrow_mut() = layer_bounds;
+ self.layer_ctx.set(Some(&layer_ctx));
+ self.init_present();
+ promise.resolve_native(promise.global().get_cx(), &());
+ },
+ Err(e) => {
+ promise.reject_native(promise.global().get_cx(), &e);
+ }
+ }
+
+ promise
+ }
+
+ #[allow(unrooted_must_root)]
+ // https://w3c.github.io/webvr/#dom-vrdisplay-exitpresent
+ fn ExitPresent(&self) -> Rc<Promise> {
+ let promise = Promise::new(&self.global());
+
+ // WebVR spec: If the VRDisplay is not presenting the promise MUST be rejected.
+ if !self.presenting.get() {
+ let msg = "VRDisplay is not presenting".to_string();
+ promise.reject_native(promise.global().get_cx(), &msg);
+ return promise;
+ }
+
+ // Exit present
+ let (sender, receiver) = ipc::channel().unwrap();
+ self.webvr_thread().send(WebVRMsg::ExitPresent(self.global().pipeline_id(),
+ self.display.borrow().display_id,
+ Some(sender)))
+ .unwrap();
+ match receiver.recv().unwrap() {
+ Ok(()) => {
+ self.stop_present();
+ promise.resolve_native(promise.global().get_cx(), &());
+ },
+ Err(e) => {
+ promise.reject_native(promise.global().get_cx(), &e);
+ }
+ }
+
+ promise
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplay-submitframe
+ fn SubmitFrame(&self) -> () {
+ if !self.presenting.get() {
+ warn!("VRDisplay not presenting");
+ return;
+ }
+
+ let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
+ let display_id = self.display.borrow().display_id;
+ let layer = self.layer.borrow();
+ let msg = VRCompositorCommand::SubmitFrame(display_id, layer.left_bounds, layer.right_bounds);
+ api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
+ }
+}
+
+impl VRDisplay {
+ fn webvr_thread(&self) -> IpcSender<WebVRMsg> {
+ self.global().as_window().webvr_thread().expect("Shouldn't arrive here with WebVR disabled")
+ }
+
+ pub fn get_display_id(&self) -> u64 {
+ self.display.borrow().display_id
+ }
+
+ pub fn update_display(&self, display: &WebVRDisplayData) {
+ *self.display.borrow_mut() = display.clone();
+ if let Some(ref stage) = display.stage_parameters {
+ if self.stage_params.get().is_none() {
+ let params = Some(VRStageParameters::new(stage.clone(), &self.global()));
+ self.stage_params.set(params.as_ref().map(|v| v.deref()));
+ } else {
+ self.stage_params.get().unwrap().update(&stage);
+ }
+ } else {
+ self.stage_params.set(None);
+ }
+ }
+
+ pub fn handle_webvr_event(&self, event: &WebVRDisplayEvent) {
+ match *event {
+ WebVRDisplayEvent::Connect(ref display) => {
+ self.update_display(&display);
+ },
+ WebVRDisplayEvent::Disconnect(_id) => {
+ self.display.borrow_mut().connected = false;
+ },
+ WebVRDisplayEvent::Activate(ref display, _) |
+ WebVRDisplayEvent::Deactivate(ref display, _) |
+ WebVRDisplayEvent::Blur(ref display) |
+ WebVRDisplayEvent::Focus(ref display) => {
+ self.update_display(&display);
+ self.notify_event(&event);
+ },
+ WebVRDisplayEvent::PresentChange(ref display, presenting) => {
+ self.update_display(&display);
+ self.presenting.set(presenting);
+ self.notify_event(&event);
+ },
+ WebVRDisplayEvent::Change(ref display) => {
+ // The Change event doesn't exist in the WebVR spec,
+ // so we update the display data but don't notify JS.
+ self.update_display(&display);
+ }
+ };
+ }
+
+ fn notify_event(&self, event: &WebVRDisplayEvent) {
+ let root = Root::from_ref(&*self);
+ let event = VRDisplayEvent::new_from_webvr(&self.global(), &root, &event);
+ event.upcast::<Event>().fire(self.upcast());
+ }
+
+ fn init_present(&self) {
+ self.presenting.set(true);
+ let (sync_sender, sync_receiver) = ipc::channel().unwrap();
+ *self.frame_data_receiver.borrow_mut() = Some(sync_receiver);
+
+ let display_id = self.display.borrow().display_id;
+ let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
+ let js_sender = self.global().script_chan();
+ let address = Trusted::new(&*self);
+ let near_init = self.depth_near.get();
+ let far_init = self.depth_far.get();
+
+ // The render loop at native headset frame rate is implemented using a dedicated thread.
+ // Every loop iteration syncs pose data with the HMD, submits the pixels to the display and waits for Vsync.
+ // Both the requestAnimationFrame call of a VRDisplay in the JavaScript thread and the VRSyncPoses call
+ // in the Webrender thread are executed in parallel. This allows some JavaScript code to run ahead
+ // while the render thread is syncing the VRFrameData to be used for the current frame.
+ // This thread runs until the user calls ExitPresent, the tab is closed or some unexpected error occurs.
+ thread::Builder::new().name("WebVR_RAF".into()).spawn(move || {
+ let (raf_sender, raf_receiver) = mpsc::channel();
+ let mut near = near_init;
+ let mut far = far_init;
+
+ // Initialize compositor
+ api_sender.send(CanvasMsg::WebVR(VRCompositorCommand::Create(display_id))).unwrap();
+ loop {
+ // Run RAF callbacks on JavaScript thread
+ let msg = box NotifyDisplayRAF {
+ address: address.clone(),
+ sender: raf_sender.clone()
+ };
+ js_sender.send(CommonScriptMsg::RunnableMsg(WebVREvent, msg)).unwrap();
+
+ // Run SyncPoses in parallel on the render thread
+ let msg = VRCompositorCommand::SyncPoses(display_id, near, far, sync_sender.clone());
+ api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
+
+ // Wait until both SyncPoses & RAF end
+ if let Ok(depth) = raf_receiver.recv().unwrap() {
+ near = depth.0;
+ far = depth.1;
+ } else {
+ // Stop thread
+ // ExitPresent called or some error happened
+ return;
+ }
+ }
+ }).expect("Thread spawning failed");
+ }
+
+ fn stop_present(&self) {
+ self.presenting.set(false);
+ *self.frame_data_receiver.borrow_mut() = None;
+
+ let api_sender = self.layer_ctx.get().unwrap().ipc_renderer();
+ let display_id = self.display.borrow().display_id;
+ let msg = VRCompositorCommand::Release(display_id);
+ api_sender.send(CanvasMsg::WebVR(msg)).unwrap();
+ }
+
+ // Only called when the JSContext is destroyed while presenting.
+ // In this case we don't want to wait for the WebVR thread's response.
+ fn force_stop_present(&self) {
+ self.webvr_thread().send(WebVRMsg::ExitPresent(self.global().pipeline_id(),
+ self.display.borrow().display_id,
+ None))
+ .unwrap();
+ self.stop_present();
+ }
+
+ fn sync_frame_data(&self) {
+ let status = if let Some(receiver) = self.frame_data_receiver.borrow().as_ref() {
+ match receiver.recv().unwrap() {
+ Ok(bytes) => {
+ *self.frame_data.borrow_mut() = WebVRFrameData::from_bytes(&bytes[..]);
+ VRFrameDataStatus::Synced
+ },
+ Err(()) => {
+ VRFrameDataStatus::Exit
+ }
+ }
+ } else {
+ VRFrameDataStatus::Exit
+ };
+
+ self.frame_data_status.set(status);
+ }
+
+ fn handle_raf(&self, end_sender: &mpsc::Sender<Result<(f64, f64), ()>>) {
+ self.frame_data_status.set(VRFrameDataStatus::Waiting);
+
+ let mut callbacks = mem::replace(&mut *self.raf_callback_list.borrow_mut(), vec![]);
+ let now = self.global().as_window().Performance().Now();
+
+ // Call registered VRDisplay.requestAnimationFrame callbacks.
+ for (_, callback) in callbacks.drain(..) {
+ if let Some(callback) = callback {
+ let _ = callback.Call__(Finite::wrap(*now), ExceptionHandling::Report);
+ }
+ }
+
+ if self.frame_data_status.get() == VRFrameDataStatus::Waiting {
+ // The user didn't call getFrameData while presenting.
+ // We automatically read the pending VRFrameData to avoid overflowing the IPC channel buffers,
+ // and show a warning as the WebVR spec recommends.
+ warn!("WebVR: You should call GetFrameData while presenting");
+ self.sync_frame_data();
+ }
+
+ match self.frame_data_status.get() {
+ VRFrameDataStatus::Synced => {
+ // Sync succeeded. Notify RAF thread.
+ end_sender.send(Ok((self.depth_near.get(), self.depth_far.get()))).unwrap();
+ },
+ VRFrameDataStatus::Exit | VRFrameDataStatus::Waiting => {
+ // ExitPresent called or some error occurred.
+ // Notify VRDisplay RAF thread to stop.
+ end_sender.send(Err(())).unwrap();
+ }
+ }
+ }
+}
+
+struct NotifyDisplayRAF {
+ address: Trusted<VRDisplay>,
+ sender: mpsc::Sender<Result<(f64, f64), ()>>
+}
+
+impl Runnable for NotifyDisplayRAF {
+ fn name(&self) -> &'static str { "NotifyDisplayRAF" }
+
+ fn handler(self: Box<Self>) {
+ let display = self.address.root();
+ display.handle_raf(&self.sender);
+ }
+}
+
+
+// WebVR spec: If the number of values in the leftBounds/rightBounds arrays
+// is not 0 or 4 for any of the passed layers, the promise is rejected.
+fn parse_bounds(src: &Option<Vec<Finite<f32>>>, dst: &mut [f32; 4]) -> Result<(), &'static str> {
+ match *src {
+ Some(ref values) => {
+ if values.len() == 0 {
+ return Ok(())
+ }
+ if values.len() != 4 {
+ return Err("The number of values in the leftBounds/rightBounds arrays must be 0 or 4")
+ }
+ for i in 0..4 {
+ dst[i] = *values[i].deref();
+ }
+ Ok(())
+ },
+ None => Ok(())
+ }
+}
+
+fn validate_layer(cx: *mut JSContext,
+ layer: &VRLayer)
+ -> Result<(WebVRLayer, Root<WebGLRenderingContext>), &'static str> {
+ let ctx = layer.source.as_ref().map(|ref s| s.get_or_init_webgl_context(cx, None)).unwrap_or(None);
+ if let Some(ctx) = ctx {
+ let mut data = WebVRLayer::default();
+ try!(parse_bounds(&layer.leftBounds, &mut data.left_bounds));
+ try!(parse_bounds(&layer.rightBounds, &mut data.right_bounds));
+ Ok((data, ctx))
+ } else {
+ Err("VRLayer source must be a WebGL Context")
+ }
+}
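(Illustrative TypeScript sketch, not part of the patch: the render loop above re-sends depthNear/depthFar to the compositor on every frame, so adjusting them from script while presenting takes effect on subsequent getFrameData() calls. The helper name and the `any` typing are assumptions.)

    function setClipPlanes(display: any, near: number, far: number): void {
        display.depthNear = near; // defaults to 0.01
        display.depthFar = far;   // defaults to 10000.0
    }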
diff --git a/components/script/dom/vrdisplaycapabilities.rs b/components/script/dom/vrdisplaycapabilities.rs
new file mode 100644
index 00000000000..0b131f8ffd3
--- /dev/null
+++ b/components/script/dom/vrdisplaycapabilities.rs
@@ -0,0 +1,62 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::VRDisplayCapabilitiesBinding;
+use dom::bindings::codegen::Bindings::VRDisplayCapabilitiesBinding::VRDisplayCapabilitiesMethods;
+use dom::bindings::js::Root;
+use dom::bindings::reflector::{Reflector, reflect_dom_object};
+use dom::globalscope::GlobalScope;
+use webvr_traits::WebVRDisplayCapabilities;
+
+#[dom_struct]
+pub struct VRDisplayCapabilities {
+ reflector_: Reflector,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ capabilities: DOMRefCell<WebVRDisplayCapabilities>
+}
+
+unsafe_no_jsmanaged_fields!(WebVRDisplayCapabilities);
+
+impl VRDisplayCapabilities {
+ fn new_inherited(capabilities: WebVRDisplayCapabilities) -> VRDisplayCapabilities {
+ VRDisplayCapabilities {
+ reflector_: Reflector::new(),
+ capabilities: DOMRefCell::new(capabilities)
+ }
+ }
+
+ pub fn new(capabilities: WebVRDisplayCapabilities, global: &GlobalScope) -> Root<VRDisplayCapabilities> {
+ reflect_dom_object(box VRDisplayCapabilities::new_inherited(capabilities),
+ global,
+ VRDisplayCapabilitiesBinding::Wrap)
+ }
+}
+
+impl VRDisplayCapabilitiesMethods for VRDisplayCapabilities {
+ // https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasposition
+ fn HasPosition(&self) -> bool {
+ self.capabilities.borrow().has_position
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasorientation
+ fn HasOrientation(&self) -> bool {
+ self.capabilities.borrow().has_orientation
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-hasexternaldisplay
+ fn HasExternalDisplay(&self) -> bool {
+ self.capabilities.borrow().has_external_display
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-canpresent
+ fn CanPresent(&self) -> bool {
+ self.capabilities.borrow().can_present
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrdisplaycapabilities-maxlayers
+ fn MaxLayers(&self) -> u32 {
+ if self.CanPresent() { 1 } else { 0 }
+ }
+}
diff --git a/components/script/dom/vrdisplayevent.rs b/components/script/dom/vrdisplayevent.rs
new file mode 100644
index 00000000000..b6257464e53
--- /dev/null
+++ b/components/script/dom/vrdisplayevent.rs
@@ -0,0 +1,116 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use dom::bindings::codegen::Bindings::EventBinding::EventBinding::EventMethods;
+use dom::bindings::codegen::Bindings::VRDisplayEventBinding;
+use dom::bindings::codegen::Bindings::VRDisplayEventBinding::VRDisplayEventMethods;
+use dom::bindings::codegen::Bindings::VRDisplayEventBinding::VRDisplayEventReason;
+use dom::bindings::error::Fallible;
+use dom::bindings::inheritance::Castable;
+use dom::bindings::js::{JS, Root};
+use dom::bindings::reflector::{DomObject, reflect_dom_object};
+use dom::bindings::str::DOMString;
+use dom::event::Event;
+use dom::globalscope::GlobalScope;
+use dom::vrdisplay::VRDisplay;
+use dom::window::Window;
+use servo_atoms::Atom;
+use webvr_traits::{WebVRDisplayEvent, WebVRDisplayEventReason};
+
+#[dom_struct]
+pub struct VRDisplayEvent {
+ event: Event,
+ display: JS<VRDisplay>,
+ reason: Option<VRDisplayEventReason>
+}
+
+impl VRDisplayEvent {
+ fn new_inherited(display: &VRDisplay,
+ reason: Option<VRDisplayEventReason>)
+ -> VRDisplayEvent {
+ VRDisplayEvent {
+ event: Event::new_inherited(),
+ display: JS::from_ref(display),
+ reason: reason.clone()
+ }
+ }
+
+ pub fn new(global: &GlobalScope,
+ type_: Atom,
+ bubbles: bool,
+ cancelable: bool,
+ display: &VRDisplay,
+ reason: Option<VRDisplayEventReason>)
+ -> Root<VRDisplayEvent> {
+ let ev = reflect_dom_object(box VRDisplayEvent::new_inherited(&display, reason),
+ global,
+ VRDisplayEventBinding::Wrap);
+ {
+ let event = ev.upcast::<Event>();
+ event.init_event(type_, bubbles, cancelable);
+ }
+ ev
+ }
+
+ pub fn new_from_webvr(global: &GlobalScope,
+ display: &VRDisplay,
+ event: &WebVRDisplayEvent)
+ -> Root<VRDisplayEvent> {
+ let (name, reason) = match *event {
+ WebVRDisplayEvent::Connect(_) => ("displayconnect", None),
+ WebVRDisplayEvent::Disconnect(_) => ("displaydisconnect", None),
+ WebVRDisplayEvent::Activate(_, reason) => ("activate", Some(reason)),
+ WebVRDisplayEvent::Deactivate(_, reason) => ("deactivate", Some(reason)),
+ WebVRDisplayEvent::Blur(_) => ("blur", None),
+ WebVRDisplayEvent::Focus(_) => ("focus", None),
+ WebVRDisplayEvent::PresentChange(_, _) => ("presentchange", None),
+ WebVRDisplayEvent::Change(_) => panic!("VRDisplayEvent:Change event not available in WebVR")
+ };
+
+ // map to JS enum values
+ let reason = reason.map(|r| {
+ match r {
+ WebVRDisplayEventReason::Navigation => VRDisplayEventReason::Navigation,
+ WebVRDisplayEventReason::Mounted => VRDisplayEventReason::Mounted,
+ WebVRDisplayEventReason::Unmounted => VRDisplayEventReason::Unmounted,
+ }
+ });
+
+ VRDisplayEvent::new(&global,
+ Atom::from(DOMString::from(name)),
+ false,
+ false,
+ &display,
+ reason)
+ }
+
+ pub fn Constructor(window: &Window,
+ type_: DOMString,
+ init: &VRDisplayEventBinding::VRDisplayEventInit)
+ -> Fallible<Root<VRDisplayEvent>> {
+ Ok(VRDisplayEvent::new(&window.global(),
+ Atom::from(type_),
+ init.parent.bubbles,
+ init.parent.cancelable,
+ &init.display,
+ init.reason))
+ }
+}
+
+impl VRDisplayEventMethods for VRDisplayEvent {
+ // https://w3c.github.io/webvr/#dom-vrdisplayevent-display
+ fn Display(&self) -> Root<VRDisplay> {
+ Root::from_ref(&*self.display)
+ }
+
+ // https://w3c.github.io/webvr/#enumdef-vrdisplayeventreason
+ fn GetReason(&self) -> Option<VRDisplayEventReason> {
+ self.reason
+ }
+
+ // https://dom.spec.whatwg.org/#dom-event-istrusted
+ fn IsTrusted(&self) -> bool {
+ self.event.IsTrusted()
+ }
+}
diff --git a/components/script/dom/vreyeparameters.rs b/components/script/dom/vreyeparameters.rs
new file mode 100644
index 00000000000..9a5e2066e2c
--- /dev/null
+++ b/components/script/dom/vreyeparameters.rs
@@ -0,0 +1,75 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use core::nonzero::NonZero;
+use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::VREyeParametersBinding;
+use dom::bindings::codegen::Bindings::VREyeParametersBinding::VREyeParametersMethods;
+use dom::bindings::conversions::slice_to_array_buffer_view;
+use dom::bindings::js::{JS, Root};
+use dom::bindings::reflector::{Reflector, reflect_dom_object};
+use dom::globalscope::GlobalScope;
+use dom::vrfieldofview::VRFieldOfView;
+use js::jsapi::{Heap, JSContext, JSObject};
+use std::default::Default;
+use webvr_traits::WebVREyeParameters;
+
+#[dom_struct]
+pub struct VREyeParameters {
+ reflector_: Reflector,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ parameters: DOMRefCell<WebVREyeParameters>,
+ offset: Heap<*mut JSObject>,
+ fov: JS<VRFieldOfView>,
+}
+
+unsafe_no_jsmanaged_fields!(WebVREyeParameters);
+
+impl VREyeParameters {
+ #[allow(unsafe_code)]
+ #[allow(unrooted_must_root)]
+ fn new_inherited(parameters: WebVREyeParameters, global: &GlobalScope) -> VREyeParameters {
+ let fov = VRFieldOfView::new(&global, parameters.field_of_view.clone());
+ let mut result = VREyeParameters {
+ reflector_: Reflector::new(),
+ parameters: DOMRefCell::new(parameters),
+ offset: Heap::default(),
+ fov: JS::from_ref(&*fov)
+ };
+
+ unsafe {
+ result.offset.set(slice_to_array_buffer_view(global.get_cx(), &result.parameters.borrow().offset));
+ }
+ result
+ }
+
+ pub fn new(parameters: WebVREyeParameters, global: &GlobalScope) -> Root<VREyeParameters> {
+ reflect_dom_object(box VREyeParameters::new_inherited(parameters, global),
+ global,
+ VREyeParametersBinding::Wrap)
+ }
+}
+
+impl VREyeParametersMethods for VREyeParameters {
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vreyeparameters-offset
+ unsafe fn Offset(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
+ NonZero::new(self.offset.get())
+ }
+
+ // https://w3c.github.io/webvr/#dom-vreyeparameters-fieldofview
+ fn FieldOfView(&self) -> Root<VRFieldOfView> {
+ Root::from_ref(&*self.fov)
+ }
+
+ // https://w3c.github.io/webvr/#dom-vreyeparameters-renderwidth
+ fn RenderWidth(&self) -> u32 {
+ self.parameters.borrow().render_width
+ }
+
+ // https://w3c.github.io/webvr/#dom-vreyeparameters-renderheight
+ fn RenderHeight(&self) -> u32 {
+ self.parameters.borrow().render_height
+ }
+}
diff --git a/components/script/dom/vrfieldofview.rs b/components/script/dom/vrfieldofview.rs
new file mode 100644
index 00000000000..5103e1ec781
--- /dev/null
+++ b/components/script/dom/vrfieldofview.rs
@@ -0,0 +1,58 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::VRFieldOfViewBinding;
+use dom::bindings::codegen::Bindings::VRFieldOfViewBinding::VRFieldOfViewMethods;
+use dom::bindings::js::Root;
+use dom::bindings::num::Finite;
+use dom::bindings::reflector::{Reflector, reflect_dom_object};
+use dom::globalscope::GlobalScope;
+use webvr_traits::WebVRFieldOfView;
+
+#[dom_struct]
+pub struct VRFieldOfView {
+ reflector_: Reflector,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ fov: DOMRefCell<WebVRFieldOfView>
+}
+
+unsafe_no_jsmanaged_fields!(WebVRFieldOfView);
+
+impl VRFieldOfView {
+ fn new_inherited(fov: WebVRFieldOfView) -> VRFieldOfView {
+ VRFieldOfView {
+ reflector_: Reflector::new(),
+ fov: DOMRefCell::new(fov)
+ }
+ }
+
+ pub fn new(global: &GlobalScope, fov: WebVRFieldOfView) -> Root<VRFieldOfView> {
+ reflect_dom_object(box VRFieldOfView::new_inherited(fov),
+ global,
+ VRFieldOfViewBinding::Wrap)
+ }
+}
+
+impl VRFieldOfViewMethods for VRFieldOfView {
+ // https://w3c.github.io/webvr/#interface-interface-vrfieldofview
+ fn UpDegrees(&self) -> Finite<f64> {
+ Finite::wrap(self.fov.borrow().up_degrees)
+ }
+
+ // https://w3c.github.io/webvr/#interface-interface-vrfieldofview
+ fn RightDegrees(&self) -> Finite<f64> {
+ Finite::wrap(self.fov.borrow().right_degrees)
+ }
+
+ // https://w3c.github.io/webvr/#interface-interface-vrfieldofview
+ fn DownDegrees(&self) -> Finite<f64> {
+ Finite::wrap(self.fov.borrow().down_degrees)
+ }
+
+ // https://w3c.github.io/webvr/#interface-interface-vrfieldofview
+ fn LeftDegrees(&self) -> Finite<f64> {
+ Finite::wrap(self.fov.borrow().left_degrees)
+ }
+}
diff --git a/components/script/dom/vrframedata.rs b/components/script/dom/vrframedata.rs
new file mode 100644
index 00000000000..4c1d14d66a6
--- /dev/null
+++ b/components/script/dom/vrframedata.rs
@@ -0,0 +1,122 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use core::nonzero::NonZero;
+use dom::bindings::codegen::Bindings::VRFrameDataBinding;
+use dom::bindings::codegen::Bindings::VRFrameDataBinding::VRFrameDataMethods;
+use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
+use dom::bindings::error::Fallible;
+use dom::bindings::js::{JS, Root};
+use dom::bindings::num::Finite;
+use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
+use dom::globalscope::GlobalScope;
+use dom::vrpose::VRPose;
+use dom::window::Window;
+use js::jsapi::{Heap, JSContext, JSObject};
+use std::cell::Cell;
+use webvr_traits::WebVRFrameData;
+
+#[dom_struct]
+pub struct VRFrameData {
+ reflector_: Reflector,
+ left_proj: Heap<*mut JSObject>,
+ left_view: Heap<*mut JSObject>,
+ right_proj: Heap<*mut JSObject>,
+ right_view: Heap<*mut JSObject>,
+ pose: JS<VRPose>,
+ timestamp: Cell<f64>,
+ first_timestamp: Cell<f64>
+}
+
+impl VRFrameData {
+ #[allow(unsafe_code)]
+ #[allow(unrooted_must_root)]
+ fn new(global: &GlobalScope) -> Root<VRFrameData> {
+ let matrix = [1.0, 0.0, 0.0, 0.0,
+ 0.0, 1.0, 0.0, 0.0,
+ 0.0, 0.0, 1.0, 0.0,
+ 0.0, 0.0, 0.0, 1.0f32];
+ let pose = VRPose::new(&global, &Default::default());
+
+ let mut framedata = VRFrameData {
+ reflector_: Reflector::new(),
+ left_proj: Heap::default(),
+ left_view: Heap::default(),
+ right_proj: Heap::default(),
+ right_view: Heap::default(),
+ pose: JS::from_ref(&*pose),
+ timestamp: Cell::new(0.0),
+ first_timestamp: Cell::new(0.0)
+ };
+
+ unsafe {
+ framedata.left_proj.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
+ framedata.left_view.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
+ framedata.right_proj.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
+ framedata.right_view.set(slice_to_array_buffer_view(global.get_cx(), &matrix));
+ }
+
+ reflect_dom_object(box framedata,
+ global,
+ VRFrameDataBinding::Wrap)
+ }
+
+ pub fn Constructor(window: &Window) -> Fallible<Root<VRFrameData>> {
+ Ok(VRFrameData::new(&window.global()))
+ }
+}
+
+
+impl VRFrameData {
+ #[allow(unsafe_code)]
+ pub fn update(&self, data: &WebVRFrameData) {
+ unsafe {
+ update_array_buffer_view(self.left_proj.get(), &data.left_projection_matrix);
+ update_array_buffer_view(self.left_view.get(), &data.left_view_matrix);
+ update_array_buffer_view(self.right_proj.get(), &data.right_projection_matrix);
+ update_array_buffer_view(self.right_view.get(), &data.right_view_matrix);
+ }
+ self.pose.update(&data.pose);
+ self.timestamp.set(data.timestamp);
+ if self.first_timestamp.get() == 0.0 {
+ self.first_timestamp.set(data.timestamp);
+ }
+ }
+}
+
+impl VRFrameDataMethods for VRFrameData {
+ // https://w3c.github.io/webvr/#dom-vrframedata-timestamp
+ fn Timestamp(&self) -> Finite<f64> {
+ Finite::wrap(self.timestamp.get() - self.first_timestamp.get())
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrframedata-leftprojectionmatrix
+ unsafe fn LeftProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
+ NonZero::new(self.left_proj.get())
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrframedata-leftviewmatrix
+ unsafe fn LeftViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
+ NonZero::new(self.left_view.get())
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrframedata-rightprojectionmatrix
+ unsafe fn RightProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
+ NonZero::new(self.right_proj.get())
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrframedata-rightviewmatrix
+ unsafe fn RightViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
+ NonZero::new(self.right_view.get())
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrframedata-pose
+ fn Pose(&self) -> Root<VRPose> {
+ Root::from_ref(&*self.pose)
+ }
+}
diff --git a/components/script/dom/vrpose.rs b/components/script/dom/vrpose.rs
new file mode 100644
index 00000000000..a6cb09d96bf
--- /dev/null
+++ b/components/script/dom/vrpose.rs
@@ -0,0 +1,133 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use core::nonzero::NonZero;
+use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::VRPoseBinding;
+use dom::bindings::codegen::Bindings::VRPoseBinding::VRPoseMethods;
+use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
+use dom::bindings::js::Root;
+use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
+use dom::globalscope::GlobalScope;
+use js::jsapi::{Heap, JSContext, JSObject};
+use std::ptr;
+use webvr_traits::webvr;
+
+#[dom_struct]
+pub struct VRPose {
+ reflector_: Reflector,
+ position: DOMRefCell<Heap<*mut JSObject>>,
+ orientation: DOMRefCell<Heap<*mut JSObject>>,
+ linear_vel: DOMRefCell<Heap<*mut JSObject>>,
+ angular_vel: DOMRefCell<Heap<*mut JSObject>>,
+ linear_acc: DOMRefCell<Heap<*mut JSObject>>,
+ angular_acc: DOMRefCell<Heap<*mut JSObject>>
+}
+
+#[allow(unsafe_code)]
+unsafe fn update_or_create_typed_array(cx: *mut JSContext,
+ src: Option<&[f32]>,
+ dst: &DOMRefCell<Heap<*mut JSObject>>) {
+ let mut dst = dst.borrow_mut();
+ match src {
+ Some(ref data) => {
+ if dst.get().is_null() {
+ dst.set(slice_to_array_buffer_view(cx, &data));
+ } else {
+ update_array_buffer_view(dst.get(), &data);
+ }
+ },
+ None => {
+ if !dst.get().is_null() {
+ dst.set(ptr::null_mut());
+ }
+ }
+ }
+}
+
+#[inline]
+#[allow(unsafe_code)]
+fn heap_to_option(heap: &DOMRefCell<Heap<*mut JSObject>>) -> Option<NonZero<*mut JSObject>> {
+ let js_object = heap.borrow_mut().get();
+ if js_object.is_null() {
+ None
+ } else {
+ unsafe {
+ Some(NonZero::new(js_object))
+ }
+ }
+}
+
+impl VRPose {
+ fn new_inherited() -> VRPose {
+ VRPose {
+ reflector_: Reflector::new(),
+ position: DOMRefCell::new(Heap::default()),
+ orientation: DOMRefCell::new(Heap::default()),
+ linear_vel: DOMRefCell::new(Heap::default()),
+ angular_vel: DOMRefCell::new(Heap::default()),
+ linear_acc: DOMRefCell::new(Heap::default()),
+ angular_acc: DOMRefCell::new(Heap::default())
+ }
+ }
+
+ pub fn new(global: &GlobalScope, pose: &webvr::VRPose) -> Root<VRPose> {
+ let root = reflect_dom_object(box VRPose::new_inherited(),
+ global,
+ VRPoseBinding::Wrap);
+ root.update(&pose);
+ root
+ }
+
+ #[allow(unsafe_code)]
+ pub fn update(&self, pose: &webvr::VRPose) {
+ let cx = self.global().get_cx();
+ unsafe {
+ update_or_create_typed_array(cx, pose.position.as_ref().map(|v| &v[..]), &self.position);
+ update_or_create_typed_array(cx, pose.orientation.as_ref().map(|v| &v[..]), &self.orientation);
+ update_or_create_typed_array(cx, pose.linear_velocity.as_ref().map(|v| &v[..]), &self.linear_vel);
+ update_or_create_typed_array(cx, pose.angular_velocity.as_ref().map(|v| &v[..]), &self.angular_vel);
+ update_or_create_typed_array(cx, pose.linear_acceleration.as_ref().map(|v| &v[..]), &self.linear_acc);
+ update_or_create_typed_array(cx, pose.angular_acceleration.as_ref().map(|v| &v[..]), &self.angular_acc);
+ }
+ }
+}
+
+impl VRPoseMethods for VRPose {
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrpose-position
+ unsafe fn GetPosition(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
+ heap_to_option(&self.position)
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrpose-linearvelocity
+ unsafe fn GetLinearVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
+ heap_to_option(&self.linear_vel)
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrpose-linearacceleration
+ unsafe fn GetLinearAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
+ heap_to_option(&self.linear_acc)
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrpose-orientation
+ unsafe fn GetOrientation(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
+ heap_to_option(&self.orientation)
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrpose-angularvelocity
+ unsafe fn GetAngularVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
+ heap_to_option(&self.angular_vel)
+ }
+
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrpose-angularacceleration
+ unsafe fn GetAngularAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
+ heap_to_option(&self.angular_acc)
+ }
+}
diff --git a/components/script/dom/vrstageparameters.rs b/components/script/dom/vrstageparameters.rs
new file mode 100644
index 00000000000..ac3636aac91
--- /dev/null
+++ b/components/script/dom/vrstageparameters.rs
@@ -0,0 +1,75 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use core::nonzero::NonZero;
+use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::VRStageParametersBinding;
+use dom::bindings::codegen::Bindings::VRStageParametersBinding::VRStageParametersMethods;
+use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
+use dom::bindings::js::Root;
+use dom::bindings::num::Finite;
+use dom::bindings::reflector::{Reflector, reflect_dom_object};
+use dom::globalscope::GlobalScope;
+use js::jsapi::{Heap, JSContext, JSObject};
+use webvr_traits::WebVRStageParameters;
+
+#[dom_struct]
+pub struct VRStageParameters {
+ reflector_: Reflector,
+ #[ignore_heap_size_of = "Defined in rust-webvr"]
+ parameters: DOMRefCell<WebVRStageParameters>,
+ transform: Heap<*mut JSObject>,
+}
+
+unsafe_no_jsmanaged_fields!(WebVRStageParameters);
+
+impl VRStageParameters {
+ #[allow(unsafe_code)]
+ #[allow(unrooted_must_root)]
+ fn new_inherited(parameters: WebVRStageParameters, global: &GlobalScope) -> VRStageParameters {
+ let mut stage = VRStageParameters {
+ reflector_: Reflector::new(),
+ parameters: DOMRefCell::new(parameters),
+ transform: Heap::default()
+ };
+ unsafe {
+ stage.transform.set(slice_to_array_buffer_view(global.get_cx(),
+ &stage.parameters.borrow().sitting_to_standing_transform));
+ }
+
+ stage
+ }
+
+ pub fn new(parameters: WebVRStageParameters, global: &GlobalScope) -> Root<VRStageParameters> {
+ reflect_dom_object(box VRStageParameters::new_inherited(parameters, global),
+ global,
+ VRStageParametersBinding::Wrap)
+ }
+
+ #[allow(unsafe_code)]
+ pub fn update(&self, parameters: &WebVRStageParameters) {
+ unsafe {
+ update_array_buffer_view(self.transform.get(), &parameters.sitting_to_standing_transform);
+ }
+ *self.parameters.borrow_mut() = parameters.clone();
+ }
+}
+
+impl VRStageParametersMethods for VRStageParameters {
+ #[allow(unsafe_code)]
+ // https://w3c.github.io/webvr/#dom-vrstageparameters-sittingtostandingtransform
+ unsafe fn SittingToStandingTransform(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
+ NonZero::new(self.transform.get())
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrstageparameters-sizex
+ fn SizeX(&self) -> Finite<f32> {
+ Finite::wrap(self.parameters.borrow().size_x)
+ }
+
+ // https://w3c.github.io/webvr/#dom-vrstageparameters-sizez
+ fn SizeZ(&self) -> Finite<f32> {
+ Finite::wrap(self.parameters.borrow().size_z)
+ }
+}
diff --git a/components/script/dom/webidls/Navigator.webidl b/components/script/dom/webidls/Navigator.webidl
index 84d5a7c3ccb..495222cecd2 100644
--- a/components/script/dom/webidls/Navigator.webidl
+++ b/components/script/dom/webidls/Navigator.webidl
@@ -57,3 +57,8 @@ interface NavigatorPlugins {
interface NavigatorCookies {
readonly attribute boolean cookieEnabled;
};
+
+// https://w3c.github.io/webvr/#interface-navigator
+partial interface Navigator {
+ [SameObject, Pref="dom.webvr.enabled"] readonly attribute VR vr;
+};
diff --git a/components/script/dom/webidls/VR.webidl b/components/script/dom/webidls/VR.webidl
new file mode 100644
index 00000000000..0fded365be0
--- /dev/null
+++ b/components/script/dom/webidls/VR.webidl
@@ -0,0 +1,10 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-navigator
+[Pref="dom.webvr.enabled"]
+interface VR: EventTarget {
+ Promise<sequence<VRDisplay>> getDisplays();
+ //readonly attribute FrozenArray<VRDisplay> activeVRDisplays;
+};
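(Illustrative TypeScript sketch, not part of the patch: how page script might enumerate displays through the navigator.vr attribute defined above. The helper name is an assumption, and the `any` cast is only because navigator.vr has no entry in the standard TypeScript DOM typings.)

    async function listVRDisplays(): Promise<void> {
        const vr = (navigator as any).vr;
        if (!vr) {
            console.log("WebVR not available (dom.webvr.enabled is off)");
            return;
        }
        // Resolves with the synced list of VRDisplay objects, or rejects with a
        // SecurityError when the WebVR thread is missing.
        const displays = await vr.getDisplays();
        for (const d of displays) {
            console.log(`display ${d.displayId}: ${d.displayName}`);
        }
    }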
diff --git a/components/script/dom/webidls/VRDisplay.webidl b/components/script/dom/webidls/VRDisplay.webidl
new file mode 100644
index 00000000000..6822a994a9b
--- /dev/null
+++ b/components/script/dom/webidls/VRDisplay.webidl
@@ -0,0 +1,131 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+enum VREye {
+ "left",
+ "right"
+};
+
+
+// https://w3c.github.io/webvr/#interface-vrdisplay
+[Pref="dom.webvr.enabled"]
+interface VRDisplay : EventTarget {
+ readonly attribute boolean isConnected;
+ readonly attribute boolean isPresenting;
+
+ /**
+ * Dictionary of capabilities describing the VRDisplay.
+ */
+ [SameObject] readonly attribute VRDisplayCapabilities capabilities;
+
+ /**
+ * If this VRDisplay supports room-scale experiences, the optional
+ * stage attribute contains details on the room-scale parameters.
+ * The stageParameters attribute can not change between null
+ * and non-null once the VRDisplay is enumerated; however,
+ * the values within VRStageParameters may change after
+ * any call to VRDisplay.submitFrame as the user may re-configure
+ * their environment at any time.
+ */
+ readonly attribute VRStageParameters? stageParameters;
+
+ /**
+ * Return the current VREyeParameters for the given eye.
+ */
+ VREyeParameters getEyeParameters(VREye whichEye);
+
+ /**
+ * An identifier for this distinct VRDisplay. Used as an
+ * association point in the Gamepad API.
+ */
+ readonly attribute unsigned long displayId;
+
+ /**
+ * A user-readable name identifying the VRDisplay.
+ */
+ readonly attribute DOMString displayName;
+
+ /**
+ * Populates the passed VRFrameData with the information required to render
+ * the current frame.
+ */
+ boolean getFrameData(VRFrameData frameData);
+
+ /**
+ * Return a VRPose containing the future predicted pose of the VRDisplay
+ * when the current frame will be presented. The value returned will not
+ * change until JavaScript has returned control to the browser.
+ *
+ * The VRPose will contain the position, orientation, velocity,
+ * and acceleration of each of these properties.
+ */
+ [NewObject] VRPose getPose();
+
+ /**
+ * Reset the pose for this display, treating its current position and
+ * orientation as the "origin/zero" values. VRPose.position,
+ * VRPose.orientation, and VRStageParameters.sittingToStandingTransform may be
+ * updated when calling resetPose(). This should only be called in
+ * sitting-space experiences.
+ */
+ void resetPose();
+
+ /**
+ * z-depth defining the near plane of the eye view frustum
+ * enables mapping of values in the render target depth
+ * attachment to scene coordinates. Initially set to 0.01.
+ */
+ attribute double depthNear;
+
+ /**
+ * z-depth defining the far plane of the eye view frustum
+ * enables mapping of values in the render target depth
+ * attachment to scene coordinates. Initially set to 10000.0.
+ */
+ attribute double depthFar;
+
+ /**
+ * The callback passed to `requestAnimationFrame` will be called
+ * any time a new frame should be rendered. When the VRDisplay is
+ * presenting the callback will be called at the native refresh
+ * rate of the HMD. When not presenting this function acts
+ * identically to how window.requestAnimationFrame acts. Content should
+ * make no assumptions of frame rate or vsync behavior as the HMD runs
+ * asynchronously from other displays and at differing refresh rates.
+ */
+ unsigned long requestAnimationFrame(FrameRequestCallback callback);
+
+ /**
+ * Passing the value returned by `requestAnimationFrame` to
+ * `cancelAnimationFrame` will unregister the callback.
+ */
+ void cancelAnimationFrame(unsigned long handle);
+
+ /**
+ * Begin presenting to the VRDisplay. Must be called in response to a user gesture.
+ * Repeat calls while already presenting will update the VRLayers being displayed.
+ * If the number of values in the leftBounds/rightBounds arrays is not 0 or 4 for
+ * any of the passed layers the promise is rejected.
+ * If the source of any of the layers is not present (null), the promise is rejected.
+ */
+ Promise<void> requestPresent(sequence<VRLayer> layers);
+
+ /**
+ * Stops presenting to the VRDisplay.
+ */
+ Promise<void> exitPresent();
+
+ /**
+ * Get the layers currently being presented.
+ */
+ //sequence<VRLayer> getLayers();
+
+ /**
+ * The VRLayer provided to the VRDisplay will be captured and presented
+ * in the HMD. Calling this function has the same effect on the source
+ * canvas as any other operation that uses its source image, and canvases
+ * created without preserveDrawingBuffer set to true will be cleared.
+ */
+ void submitFrame();
+};
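(Illustrative TypeScript sketch, not part of the patch: the present-and-render loop this interface implies, assuming an already-obtained display and a WebGL-capable canvas. Types are loosened to `any` since these interfaces are not in the standard TypeScript DOM lib.)

    async function enterVR(display: any, canvas: HTMLCanvasElement): Promise<void> {
        // This implementation rejects unless exactly one layer is passed and its
        // source can be backed by a WebGL context.
        await display.requestPresent([{ source: canvas }]);

        const frameData = new (window as any).VRFrameData();
        const onFrame = (): void => {
            // Call getFrameData() once per callback while presenting; the display
            // otherwise logs a warning and drains the pending data itself.
            display.getFrameData(frameData);
            // ... render the left/right eye views from frameData here ...
            display.submitFrame();
            display.requestAnimationFrame(onFrame);
        };
        display.requestAnimationFrame(onFrame);
    }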
diff --git a/components/script/dom/webidls/VRDisplayCapabilities.webidl b/components/script/dom/webidls/VRDisplayCapabilities.webidl
new file mode 100644
index 00000000000..2d9cccd6a97
--- /dev/null
+++ b/components/script/dom/webidls/VRDisplayCapabilities.webidl
@@ -0,0 +1,13 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vrdisplaycapabilities
+[Pref="dom.webvr.enabled"]
+interface VRDisplayCapabilities {
+ readonly attribute boolean hasPosition;
+ readonly attribute boolean hasOrientation;
+ readonly attribute boolean hasExternalDisplay;
+ readonly attribute boolean canPresent;
+ readonly attribute unsigned long maxLayers;
+};
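(Illustrative TypeScript sketch, not part of the patch: a capability check before attempting to present. In this implementation maxLayers is 1 whenever canPresent is true and 0 otherwise; the helper name is an assumption.)

    function canEnterVR(display: any): boolean {
        const caps = display.capabilities;
        // requestPresent() is rejected when canPresent is false.
        return caps.canPresent && caps.maxLayers >= 1;
    }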
diff --git a/components/script/dom/webidls/VRDisplayEvent.webidl b/components/script/dom/webidls/VRDisplayEvent.webidl
new file mode 100644
index 00000000000..df0990d32a8
--- /dev/null
+++ b/components/script/dom/webidls/VRDisplayEvent.webidl
@@ -0,0 +1,23 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vrdisplayevent
+
+enum VRDisplayEventReason {
+ "navigation",
+ "mounted",
+ "unmounted",
+ "requested"
+};
+
+[Pref="dom.webvr.enabled", Constructor(DOMString type, VRDisplayEventInit eventInitDict)]
+interface VRDisplayEvent : Event {
+ readonly attribute VRDisplay display;
+ readonly attribute VRDisplayEventReason? reason;
+};
+
+dictionary VRDisplayEventInit : EventInit {
+ required VRDisplay display;
+ VRDisplayEventReason reason;
+};
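(Illustrative TypeScript sketch, not part of the patch: listening for these events as this patch dispatches them. displayconnect and displaydisconnect fire on the VR object, while activate, deactivate, blur, focus and presentchange fire on the VRDisplay itself; the listener bodies are assumptions.)

    function watchDisplays(vr: any): void {
        vr.addEventListener("displayconnect", (e: any) => {
            console.log("connected:", e.display.displayName);
            e.display.addEventListener("activate", (ev: any) => {
                // ev.reason is one of the VRDisplayEventReason values above.
                console.log("activate, reason:", ev.reason);
            });
        });
        vr.addEventListener("displaydisconnect", (e: any) => {
            console.log("disconnected:", e.display.displayName);
        });
    }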
diff --git a/components/script/dom/webidls/VREyeParameters.webidl b/components/script/dom/webidls/VREyeParameters.webidl
new file mode 100644
index 00000000000..5d127f20784
--- /dev/null
+++ b/components/script/dom/webidls/VREyeParameters.webidl
@@ -0,0 +1,13 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vreyeparameters
+
+[Pref="dom.webvr.enabled"]
+interface VREyeParameters {
+ readonly attribute Float32Array offset;
+ [SameObject] readonly attribute VRFieldOfView fieldOfView;
+ readonly attribute unsigned long renderWidth;
+ readonly attribute unsigned long renderHeight;
+};
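(Illustrative TypeScript sketch, not part of the patch: the usual way renderWidth/renderHeight size a canvas holding both eye views side by side. The helper name is an assumption.)

    function sizeCanvasForVR(display: any, canvas: HTMLCanvasElement): void {
        const left = display.getEyeParameters("left");
        const right = display.getEyeParameters("right");
        // One eye per half of the canvas, at the larger of the two requested sizes.
        canvas.width = Math.max(left.renderWidth, right.renderWidth) * 2;
        canvas.height = Math.max(left.renderHeight, right.renderHeight);
    }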
diff --git a/components/script/dom/webidls/VRFieldOfView.webidl b/components/script/dom/webidls/VRFieldOfView.webidl
new file mode 100644
index 00000000000..b562c5b0e7f
--- /dev/null
+++ b/components/script/dom/webidls/VRFieldOfView.webidl
@@ -0,0 +1,13 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vrfieldofview
+
+[Pref="dom.webvr.enabled"]
+interface VRFieldOfView {
+ readonly attribute double upDegrees;
+ readonly attribute double rightDegrees;
+ readonly attribute double downDegrees;
+ readonly attribute double leftDegrees;
+};
diff --git a/components/script/dom/webidls/VRFrameData.webidl b/components/script/dom/webidls/VRFrameData.webidl
new file mode 100644
index 00000000000..baa37ea6f68
--- /dev/null
+++ b/components/script/dom/webidls/VRFrameData.webidl
@@ -0,0 +1,15 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vrframedata
+
+[Pref="dom.webvr.enabled", Constructor]
+interface VRFrameData {
+ readonly attribute DOMHighResTimeStamp timestamp;
+ readonly attribute Float32Array leftProjectionMatrix;
+ readonly attribute Float32Array leftViewMatrix;
+ readonly attribute Float32Array rightProjectionMatrix;
+ readonly attribute Float32Array rightViewMatrix;
+ readonly attribute VRPose pose;
+};
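(Illustrative TypeScript sketch, not part of the patch: in this implementation timestamp is reported relative to the first frame a given VRFrameData instance was updated with, so per-frame deltas are computed by the page. The helper below is an assumption.)

    let lastTimestamp = 0;
    function frameDelta(frameData: any): number {
        const now: number = frameData.timestamp;
        const dt = now - lastTimestamp;
        lastTimestamp = now;
        return dt;
    }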
diff --git a/components/script/dom/webidls/VRLayer.webidl b/components/script/dom/webidls/VRLayer.webidl
new file mode 100644
index 00000000000..47b30b324f7
--- /dev/null
+++ b/components/script/dom/webidls/VRLayer.webidl
@@ -0,0 +1,13 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vrlayer
+
+//typedef (HTMLCanvasElement or OffscreenCanvas) VRSource;
+
+dictionary VRLayer {
+ HTMLCanvasElement source;
+ sequence<float> leftBounds;
+ sequence<float> rightBounds;
+};
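(Illustrative TypeScript sketch, not part of the patch: an explicit side-by-side bounds layout for the dictionary above. parse_bounds in vrdisplay.rs accepts either 0 or 4 entries per array; the values shown are the commonly used defaults, with each array being x, y, width, height in UV space.)

    const layer = {
        source: document.createElement("canvas"),
        leftBounds: [0.0, 0.0, 0.5, 1.0],   // left half of the canvas
        rightBounds: [0.5, 0.0, 0.5, 1.0],  // right half of the canvas
    };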
diff --git a/components/script/dom/webidls/VRPose.webidl b/components/script/dom/webidls/VRPose.webidl
new file mode 100644
index 00000000000..ffbd931cfbc
--- /dev/null
+++ b/components/script/dom/webidls/VRPose.webidl
@@ -0,0 +1,14 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vrpose
+[Pref="dom.webvr.enabled"]
+interface VRPose {
+ readonly attribute Float32Array? position;
+ readonly attribute Float32Array? linearVelocity;
+ readonly attribute Float32Array? linearAcceleration;
+ readonly attribute Float32Array? orientation;
+ readonly attribute Float32Array? angularVelocity;
+ readonly attribute Float32Array? angularAcceleration;
+};
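(Illustrative TypeScript sketch, not part of the patch: every VRPose member is nullable, so consumers check before reading; an orientation-only headset reports a quaternion but a null position. The helper name is an assumption.)

    function logPose(pose: any): void {
        if (pose.orientation) {
            console.log("orientation (x, y, z, w):", Array.from(pose.orientation));
        }
        if (pose.position) {
            console.log("position (meters):", Array.from(pose.position));
        }
    }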
diff --git a/components/script/dom/webidls/VRStageParameters.webidl b/components/script/dom/webidls/VRStageParameters.webidl
new file mode 100644
index 00000000000..255a8f07e76
--- /dev/null
+++ b/components/script/dom/webidls/VRStageParameters.webidl
@@ -0,0 +1,11 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// https://w3c.github.io/webvr/#interface-vrstageparameters
+[Pref="dom.webvr.enabled"]
+interface VRStageParameters {
+ readonly attribute Float32Array sittingToStandingTransform;
+ readonly attribute float sizeX;
+ readonly attribute float sizeZ;
+};
diff --git a/components/script/dom/window.rs b/components/script/dom/window.rs
index 87d15ab1616..4eab03eaeee 100644
--- a/components/script/dom/window.rs
+++ b/components/script/dom/window.rs
@@ -110,6 +110,7 @@ use timers::{IsInterval, TimerCallback};
use tinyfiledialogs::{self, MessageBoxIcon};
use url::Position;
use webdriver_handlers::jsval_to_webdriver;
+use webvr_traits::WebVRMsg;
/// Current state of the window object
#[derive(JSTraceable, Copy, Clone, Debug, PartialEq, HeapSizeOf)]
@@ -241,6 +242,10 @@ pub struct Window {
media_query_lists: WeakMediaQueryListVec,
test_runner: MutNullableJS<TestRunner>,
+
+ /// A handle for communicating messages to the webvr thread, if available.
+ #[ignore_heap_size_of = "channels are hard"]
+ webvr_thread: Option<IpcSender<WebVRMsg>>
}
impl Window {
@@ -321,6 +326,10 @@ impl Window {
pub fn current_viewport(&self) -> Rect<Au> {
self.current_viewport.clone().get()
}
+
+ pub fn webvr_thread(&self) -> Option<IpcSender<WebVRMsg>> {
+ self.webvr_thread.clone()
+ }
}
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
@@ -1590,7 +1599,8 @@ impl Window {
layout_chan: Sender<Msg>,
id: PipelineId,
parent_info: Option<(PipelineId, FrameType)>,
- window_size: Option<WindowSizeData>)
+ window_size: Option<WindowSizeData>,
+ webvr_thread: Option<IpcSender<WebVRMsg>>)
-> Root<Window> {
let layout_rpc: Box<LayoutRPC + Send> = {
let (rpc_send, rpc_recv) = channel();
@@ -1654,6 +1664,7 @@ impl Window {
scroll_offsets: DOMRefCell::new(HashMap::new()),
media_query_lists: WeakMediaQueryListVec::new(),
test_runner: Default::default(),
+ webvr_thread: webvr_thread
};
unsafe {
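
Note: the window.rs hunk above gives each Window a cloneable, optional handle to the WebVR thread. A minimal sketch of the request/reply pattern a DOM object would follow with such a handle (create a reply channel, send a request carrying the reply sender, block on the answer). This is illustrative only: std::sync::mpsc stands in for ipc-channel, and DisplayData/VrMsg are hypothetical stand-ins, not types from this patch.

    // Sketch: request/reply over an optional channel handle, as Window::webvr_thread() exposes.
    use std::sync::mpsc::{channel, Sender};
    use std::thread;

    #[derive(Debug)]
    struct DisplayData { name: String }          // hypothetical stand-in for VRDisplayData

    enum VrMsg {
        GetDisplays(Sender<Result<Vec<DisplayData>, String>>),
        Exit,
    }

    fn main() {
        // Stand-in for the WebVR thread spawned at browser startup.
        let (vr_sender, vr_receiver) = channel::<VrMsg>();
        thread::spawn(move || {
            while let Ok(msg) = vr_receiver.recv() {
                match msg {
                    VrMsg::GetDisplays(reply) => {
                        let _ = reply.send(Ok(vec![DisplayData { name: "Mock HMD".into() }]));
                    }
                    VrMsg::Exit => break,
                }
            }
        });

        // Stand-in for DOM code holding Option<Sender<VrMsg>>, like Window::webvr_thread().
        let webvr_thread: Option<Sender<VrMsg>> = Some(vr_sender);
        if let Some(ref chan) = webvr_thread {
            let (reply_sender, reply_receiver) = channel();
            chan.send(VrMsg::GetDisplays(reply_sender)).unwrap();
            match reply_receiver.recv().unwrap() {
                Ok(displays) => println!("found {} display(s): {:?}", displays.len(), displays),
                Err(e) => println!("webvr error: {}", e),
            }
            chan.send(VrMsg::Exit).unwrap();
        }
    }
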
diff --git a/components/script/lib.rs b/components/script/lib.rs
index f477e562bbf..127194ebf65 100644
--- a/components/script/lib.rs
+++ b/components/script/lib.rs
@@ -98,6 +98,7 @@ extern crate url;
extern crate uuid;
extern crate webrender_traits;
extern crate websocket;
+extern crate webvr_traits;
extern crate xml5ever;
mod body;
diff --git a/components/script/script_runtime.rs b/components/script/script_runtime.rs
index bd8874dc475..75ce94084f0 100644
--- a/components/script/script_runtime.rs
+++ b/components/script/script_runtime.rs
@@ -80,6 +80,7 @@ pub enum ScriptThreadEventCategory {
ServiceWorkerEvent,
EnterFullscreen,
ExitFullscreen,
+ WebVREvent
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM
diff --git a/components/script/script_thread.rs b/components/script/script_thread.rs
index 407a32cb1de..cb6be4e9be0 100644
--- a/components/script/script_thread.rs
+++ b/components/script/script_thread.rs
@@ -90,6 +90,7 @@ use script_traits::{ScriptThreadFactory, TimerEvent, TimerEventRequest, TimerSou
use script_traits::{TouchEventType, TouchId, UntrustedNodeAddress, WindowSizeData, WindowSizeType};
use script_traits::CompositorEvent::{KeyEvent, MouseButtonEvent, MouseMoveEvent, ResizeEvent};
use script_traits::CompositorEvent::{TouchEvent, TouchpadPressureEvent};
+use script_traits::WebVREventMsg;
use script_traits::webdriver_msg::WebDriverScriptCommand;
use serviceworkerjob::{Job, JobQueue, AsyncJobHandler, FinishJobHandler, InvokeType, SettleType};
use servo_config::opts;
@@ -116,6 +117,7 @@ use task_source::user_interaction::{UserInteractionTask, UserInteractionTaskSour
use time::Tm;
use url::Position;
use webdriver_handlers;
+use webvr_traits::WebVRMsg;
thread_local!(pub static STACK_ROOTS: Cell<Option<RootCollectionPtr>> = Cell::new(None));
thread_local!(static SCRIPT_THREAD_ROOT: Cell<Option<*const ScriptThread>> = Cell::new(None));
@@ -477,6 +479,9 @@ pub struct ScriptThread {
content_process_shutdown_chan: IpcSender<()>,
promise_job_queue: PromiseJobQueue,
+
+ /// A handle to the webvr thread, if available
+ webvr_thread: Option<IpcSender<WebVRMsg>>,
}
/// In the event of thread panic, all data on the stack runs its destructor. However, there
@@ -699,6 +704,8 @@ impl ScriptThread {
promise_job_queue: PromiseJobQueue::new(),
layout_to_constellation_chan: state.layout_to_constellation_chan,
+
+ webvr_thread: state.webvr_thread
}
}
@@ -945,6 +952,7 @@ impl ScriptThread {
ScriptThreadEventCategory::SetViewport => ProfilerCategory::ScriptSetViewport,
ScriptThreadEventCategory::TimerEvent => ProfilerCategory::ScriptTimerEvent,
ScriptThreadEventCategory::WebSocketEvent => ProfilerCategory::ScriptWebSocketEvent,
+ ScriptThreadEventCategory::WebVREvent => ProfilerCategory::ScriptWebVREvent,
ScriptThreadEventCategory::WorkerEvent => ProfilerCategory::ScriptWorkerEvent,
ScriptThreadEventCategory::ServiceWorkerEvent => ProfilerCategory::ScriptServiceWorkerEvent,
ScriptThreadEventCategory::EnterFullscreen => ProfilerCategory::ScriptEnterFullscreen,
@@ -1009,6 +1017,8 @@ impl ScriptThread {
self.handle_reload(pipeline_id),
ConstellationControlMsg::ExitPipeline(pipeline_id, discard_browsing_context) =>
self.handle_exit_pipeline_msg(pipeline_id, discard_browsing_context),
+ ConstellationControlMsg::WebVREvent(pipeline_id, event) =>
+ self.handle_webvr_event(pipeline_id, event),
msg @ ConstellationControlMsg::AttachLayout(..) |
msg @ ConstellationControlMsg::Viewport(..) |
msg @ ConstellationControlMsg::SetScrollState(..) |
@@ -1751,7 +1761,8 @@ impl ScriptThread {
incomplete.layout_chan,
incomplete.pipeline_id,
incomplete.parent_info,
- incomplete.window_size);
+ incomplete.window_size,
+ self.webvr_thread.clone());
let frame_element = frame_element.r().map(Castable::upcast);
let browsing_context = BrowsingContext::new(&window, frame_element);
@@ -2212,6 +2223,14 @@ impl ScriptThread {
}
}
+ fn handle_webvr_event(&self, pipeline_id: PipelineId, event: WebVREventMsg) {
+ let window = self.documents.borrow().find_window(pipeline_id);
+ if let Some(window) = window {
+ let navigator = window.Navigator();
+ navigator.handle_webvr_event(event);
+ }
+ }
+
pub fn enqueue_promise_job(job: EnqueuedPromiseCallback, global: &GlobalScope) {
SCRIPT_THREAD_ROOT.with(|root| {
let script_thread = unsafe { &*root.get().unwrap() };
diff --git a/components/script_layout_interface/wrapper_traits.rs b/components/script_layout_interface/wrapper_traits.rs
index 0103b185096..6ee40a343c1 100644
--- a/components/script_layout_interface/wrapper_traits.rs
+++ b/components/script_layout_interface/wrapper_traits.rs
@@ -394,7 +394,7 @@ pub trait ThreadSafeLayoutElement: Clone + Copy + Sized + Debug +
&context.default_computed_values,
false);
data.styles_mut().pseudos
- .insert(style_pseudo.clone(), new_style.unwrap());
+ .insert(style_pseudo.clone(), new_style);
}
}
PseudoElementCascadeType::Lazy => {
diff --git a/components/script_traits/Cargo.toml b/components/script_traits/Cargo.toml
index 7856e67307c..07311b1ce4a 100644
--- a/components/script_traits/Cargo.toml
+++ b/components/script_traits/Cargo.toml
@@ -35,3 +35,4 @@ servo_url = {path = "../url", features = ["servo"]}
style_traits = {path = "../style_traits", features = ["servo"]}
time = "0.1.12"
url = {version = "1.2", features = ["heap_size"]}
+webvr_traits = {path = "../webvr_traits"}
diff --git a/components/script_traits/lib.rs b/components/script_traits/lib.rs
index 3d66dade927..1916d666a3e 100644
--- a/components/script_traits/lib.rs
+++ b/components/script_traits/lib.rs
@@ -35,6 +35,7 @@ extern crate serde_derive;
extern crate servo_url;
extern crate style_traits;
extern crate time;
+extern crate webvr_traits;
mod script_msg;
pub mod webdriver_msg;
@@ -71,6 +72,7 @@ use std::fmt;
use std::sync::mpsc::{Receiver, Sender};
use style_traits::{PagePx, UnsafeNode, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
+use webvr_traits::{WebVRDisplayEvent, WebVRMsg};
pub use script_msg::{LayoutMsg, ScriptMsg, EventResult, LogEntry};
pub use script_msg::{ServiceWorkerMsg, ScopeThings, SWManagerMsg, SWManagerSenders, DOMMessage};
@@ -263,6 +265,8 @@ pub enum ConstellationControlMsg {
ReportCSSError(PipelineId, String, usize, usize, String),
/// Reload the given page.
Reload(PipelineId),
+    /// Notifies the script thread of a WebVR device event.
+ WebVREvent(PipelineId, WebVREventMsg)
}
impl fmt::Debug for ConstellationControlMsg {
@@ -295,6 +299,7 @@ impl fmt::Debug for ConstellationControlMsg {
FramedContentChanged(..) => "FramedContentChanged",
ReportCSSError(..) => "ReportCSSError",
Reload(..) => "Reload",
+ WebVREvent(..) => "WebVREvent",
};
write!(formatter, "ConstellationMsg::{}", variant)
}
@@ -478,6 +483,8 @@ pub struct InitialScriptState {
pub pipeline_namespace_id: PipelineNamespaceId,
/// A ping will be sent on this channel once the script thread shuts down.
pub content_process_shutdown_chan: IpcSender<()>,
+ /// A channel to the webvr thread, if available.
+ pub webvr_thread: Option<IpcSender<WebVRMsg>>
}
/// This trait allows creating a `ScriptThread` without depending on the `script`
@@ -716,6 +723,18 @@ pub enum ConstellationMsg {
Reload,
/// A log entry, with the top-level frame id and thread name
LogEntry(Option<FrameId>, Option<String>, LogEntry),
+ /// Set the WebVR thread channel.
+ SetWebVRThread(IpcSender<WebVRMsg>),
+ /// Dispatch a WebVR event to the subscribed script threads.
+ WebVREvent(Vec<PipelineId>, WebVREventMsg),
+}
+
+/// Messages to the constellation originating from the WebVR thread.
+/// Used to dispatch VR headset state events: connected, disconnected, and more.
+#[derive(Deserialize, Serialize, Clone)]
+pub enum WebVREventMsg {
+ /// Inform the constellation of a VR display event.
+ DisplayEvent(WebVRDisplayEvent)
}
/// Resources required by workerglobalscopes
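
Note: the new ConstellationMsg::WebVREvent variant above carries the target pipeline ids plus the event payload. A minimal fan-out sketch of how such an event could be dispatched to per-pipeline event-loop senders as ConstellationControlMsg::WebVREvent. This is not the constellation code from this patch (constellation.rs is outside this hunk); the types below are simplified stand-ins.

    // Sketch: fanning one WebVR event out to every subscribed pipeline.
    use std::collections::HashMap;
    use std::sync::mpsc::{channel, Sender};

    type PipelineId = u32;                                      // stand-in for the real PipelineId

    #[derive(Clone, Debug)]
    enum WebVREventMsg { DisplayConnected(u64) }                // stand-in for the real enum

    #[derive(Debug)]
    enum ControlMsg { WebVREvent(PipelineId, WebVREventMsg) }   // mirrors ConstellationControlMsg::WebVREvent

    fn dispatch_webvr_event(pipelines: &HashMap<PipelineId, Sender<ControlMsg>>,
                            targets: Vec<PipelineId>,
                            event: WebVREventMsg) {
        for id in targets {
            if let Some(chan) = pipelines.get(&id) {
                // Each subscribed script thread receives its own copy of the event.
                let _ = chan.send(ControlMsg::WebVREvent(id, event.clone()));
            }
        }
    }

    fn main() {
        let (tx, rx) = channel();
        let mut pipelines = HashMap::new();
        pipelines.insert(1, tx);
        dispatch_webvr_event(&pipelines, vec![1, 2], WebVREventMsg::DisplayConnected(42));
        println!("{:?}", rx.recv().unwrap());                   // WebVREvent(1, DisplayConnected(42))
    }
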
diff --git a/components/servo/Cargo.toml b/components/servo/Cargo.toml
index 6400dd0caaa..06c1c3ab01e 100644
--- a/components/servo/Cargo.toml
+++ b/components/servo/Cargo.toml
@@ -54,6 +54,8 @@ servo_config = {path = "../config"}
servo_url = {path = "../url"}
style = {path = "../style", features = ["servo"]}
url = "1.2"
+webvr = {path = "../webvr"}
+webvr_traits = {path = "../webvr_traits"}
webdriver_server = {path = "../webdriver_server", optional = true}
[dependencies.webrender]
diff --git a/components/servo/lib.rs b/components/servo/lib.rs
index afb7f88b7ae..0bef98df9ab 100644
--- a/components/servo/lib.rs
+++ b/components/servo/lib.rs
@@ -48,6 +48,8 @@ pub extern crate script_layout_interface;
pub extern crate servo_config;
pub extern crate servo_url;
pub extern crate style;
+pub extern crate webvr;
+pub extern crate webvr_traits;
#[cfg(feature = "webdriver")]
extern crate webdriver_server;
@@ -96,6 +98,7 @@ use std::cmp::max;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::Sender;
+use webvr::{WebVRThread, WebVRCompositorHandler};
pub use gleam::gl;
pub use servo_config as config;
@@ -193,6 +196,7 @@ impl<Window> Browser<Window> where Window: WindowMethods + 'static {
debugger_chan,
devtools_chan,
supports_clipboard,
+ &webrender,
webrender_api_sender.clone());
// Send the constellation's swmanager sender to service worker manager thread
@@ -260,6 +264,7 @@ fn create_constellation(user_agent: Cow<'static, str>,
debugger_chan: Option<debugger::Sender>,
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
supports_clipboard: bool,
+ webrender: &webrender::Renderer,
webrender_api_sender: webrender_traits::RenderApiSender)
-> (Sender<ConstellationMsg>, SWManagerSenders) {
let bluetooth_thread: IpcSender<BluetoothRequest> = BluetoothThreadFactory::new();
@@ -295,6 +300,16 @@ fn create_constellation(user_agent: Cow<'static, str>,
layout_thread::LayoutThread,
script::script_thread::ScriptThread>::start(initial_state);
+ if PREFS.is_webvr_enabled() {
+ // WebVR initialization
+ let (mut handler, sender) = WebVRCompositorHandler::new();
+ let webvr_thread = WebVRThread::spawn(constellation_chan.clone(), sender);
+ handler.set_webvr_thread_sender(webvr_thread.clone());
+
+ webrender.set_vr_compositor_handler(handler);
+ constellation_chan.send(ConstellationMsg::SetWebVRThread(webvr_thread)).unwrap();
+ }
+
if let Some(url) = url {
constellation_chan.send(ConstellationMsg::InitLoadUrl(url)).unwrap();
};
diff --git a/components/style/build_gecko.rs b/components/style/build_gecko.rs
index 2b75121f1d0..161a0e86f1f 100644
--- a/components/style/build_gecko.rs
+++ b/components/style/build_gecko.rs
@@ -244,7 +244,6 @@ mod bindings {
"NODE_.*",
"NS_FONT_.*",
"NS_STYLE_.*",
- "NS_CORNER_.*",
"NS_RADIUS_.*",
"BORDER_COLOR_.*",
"BORDER_STYLE_.*"
@@ -256,6 +255,7 @@ mod bindings {
"mozilla::ConsumeStyleBehavior",
"mozilla::CSSPseudoClassType",
"mozilla::css::SheetParsingMode",
+ "mozilla::HalfCorner",
"mozilla::TraversalRootBehavior",
"mozilla::DisplayItemClip", // Needed because bindgen generates
// specialization tests for this even
@@ -279,6 +279,7 @@ mod bindings {
"gfxFontFeature",
"gfxFontVariation",
"GridNamedArea",
+ "HalfCorner",
"Image",
"ImageURL",
"nsAttrName",
diff --git a/components/style/gecko_bindings/bindings.rs b/components/style/gecko_bindings/bindings.rs
index a9b73f54e8f..1546e6a6950 100644
--- a/components/style/gecko_bindings/bindings.rs
+++ b/components/style/gecko_bindings/bindings.rs
@@ -632,6 +632,11 @@ extern "C" {
nsStyleImageLayers_LayerType);
}
extern "C" {
+ pub fn Gecko_EnsureStyleAnimationArrayLength(array:
+ *mut ::std::os::raw::c_void,
+ len: usize);
+}
+extern "C" {
pub fn Gecko_ResetStyleCoord(unit: *mut nsStyleUnit,
value: *mut nsStyleUnion);
}
diff --git a/components/style/gecko_bindings/structs_debug.rs b/components/style/gecko_bindings/structs_debug.rs
index bb2f68ee737..8560f3b6ccb 100644
--- a/components/style/gecko_bindings/structs_debug.rs
+++ b/components/style/gecko_bindings/structs_debug.rs
@@ -226,14 +226,6 @@ pub mod root {
250;
pub const NS_FONT_VARIANT_NORMAL: ::std::os::raw::c_uint = 0;
pub const NS_FONT_VARIANT_SMALL_CAPS: ::std::os::raw::c_uint = 1;
- pub const NS_CORNER_TOP_LEFT_X: ::std::os::raw::c_uint = 0;
- pub const NS_CORNER_TOP_LEFT_Y: ::std::os::raw::c_uint = 1;
- pub const NS_CORNER_TOP_RIGHT_X: ::std::os::raw::c_uint = 2;
- pub const NS_CORNER_TOP_RIGHT_Y: ::std::os::raw::c_uint = 3;
- pub const NS_CORNER_BOTTOM_RIGHT_X: ::std::os::raw::c_uint = 4;
- pub const NS_CORNER_BOTTOM_RIGHT_Y: ::std::os::raw::c_uint = 5;
- pub const NS_CORNER_BOTTOM_LEFT_X: ::std::os::raw::c_uint = 6;
- pub const NS_CORNER_BOTTOM_LEFT_Y: ::std::os::raw::c_uint = 7;
pub const NS_STYLE_STACK_SIZING_IGNORE: ::std::os::raw::c_uint = 0;
pub const NS_STYLE_STACK_SIZING_STRETCH_TO_FIT: ::std::os::raw::c_uint =
1;
@@ -761,9 +753,6 @@ pub mod root {
pub const NS_STYLE_WORDBREAK_KEEP_ALL: ::std::os::raw::c_uint = 2;
pub const NS_STYLE_OVERFLOWWRAP_NORMAL: ::std::os::raw::c_uint = 0;
pub const NS_STYLE_OVERFLOWWRAP_BREAK_WORD: ::std::os::raw::c_uint = 1;
- pub const NS_STYLE_HYPHENS_NONE: ::std::os::raw::c_uint = 0;
- pub const NS_STYLE_HYPHENS_MANUAL: ::std::os::raw::c_uint = 1;
- pub const NS_STYLE_HYPHENS_AUTO: ::std::os::raw::c_uint = 2;
pub const NS_STYLE_RUBY_ALIGN_START: ::std::os::raw::c_uint = 0;
pub const NS_STYLE_RUBY_ALIGN_CENTER: ::std::os::raw::c_uint = 1;
pub const NS_STYLE_RUBY_ALIGN_SPACE_BETWEEN: ::std::os::raw::c_uint = 2;
@@ -1372,9 +1361,6 @@ pub mod root {
impl Clone for AudioContext {
fn clone(&self) -> Self { *self }
}
- pub const ReferrerPolicy_RP_Default:
- root::mozilla::dom::ReferrerPolicy =
- ReferrerPolicy::RP_No_Referrer_When_Downgrade;
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferrerPolicy {
@@ -2332,6 +2318,7 @@ pub mod root {
pub mTitle: ::nsstring::nsStringRepr,
pub mDocument: *mut root::nsIDocument,
pub mOwningNode: *mut root::nsINode,
+ pub mMedia: root::RefPtr<root::nsMediaList>,
pub mParsingMode: root::mozilla::css::SheetParsingMode,
pub mType: root::StyleBackendType,
pub mDisabled: bool,
@@ -2359,7 +2346,7 @@ pub mod root {
}
#[test]
fn bindgen_test_layout_StyleSheet() {
- assert_eq!(::std::mem::size_of::<StyleSheet>() , 88usize);
+ assert_eq!(::std::mem::size_of::<StyleSheet>() , 96usize);
assert_eq!(::std::mem::align_of::<StyleSheet>() , 8usize);
}
#[repr(C)]
@@ -2378,6 +2365,18 @@ pub mod root {
eSideBottom = 2,
eSideLeft = 3,
}
+ #[repr(u32)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum HalfCorner {
+ eCornerTopLeftX = 0,
+ eCornerTopLeftY = 1,
+ eCornerTopRightX = 2,
+ eCornerTopRightY = 3,
+ eCornerBottomRightX = 4,
+ eCornerBottomRightY = 5,
+ eCornerBottomLeftX = 6,
+ eCornerBottomLeftY = 7,
+ }
#[repr(C)]
#[derive(Debug, Copy)]
pub struct SVGAttrAnimationRuleProcessor {
@@ -2757,6 +2756,9 @@ pub mod root {
pub enum StyleFloatEdge { ContentBox = 0, MarginBox = 1, }
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum StyleHyphens { None = 0, Manual = 1, Auto = 2, }
+ #[repr(u8)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum StyleShapeOutsideShapeBox {
NoBox = 0,
Content = 1,
@@ -3055,6 +3057,93 @@ pub mod root {
assert_eq!(::std::mem::align_of::<root::mozilla::DefaultDelete<root::RawServoStyleSet>>()
, 1usize);
}
+ #[repr(u8)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum CSSPseudoClassType {
+ empty = 0,
+ mozOnlyWhitespace = 1,
+ mozEmptyExceptChildrenWithLocalname = 2,
+ lang = 3,
+ mozBoundElement = 4,
+ root = 5,
+ any = 6,
+ firstChild = 7,
+ firstNode = 8,
+ lastChild = 9,
+ lastNode = 10,
+ onlyChild = 11,
+ firstOfType = 12,
+ lastOfType = 13,
+ onlyOfType = 14,
+ nthChild = 15,
+ nthLastChild = 16,
+ nthOfType = 17,
+ nthLastOfType = 18,
+ mozIsHTML = 19,
+ unresolved = 20,
+ mozNativeAnonymous = 21,
+ mozSystemMetric = 22,
+ mozLocaleDir = 23,
+ mozLWTheme = 24,
+ mozLWThemeBrightText = 25,
+ mozLWThemeDarkText = 26,
+ mozWindowInactive = 27,
+ mozTableBorderNonzero = 28,
+ mozBrowserFrame = 29,
+ scope = 30,
+ negation = 31,
+ dir = 32,
+ link = 33,
+ mozAnyLink = 34,
+ anyLink = 35,
+ visited = 36,
+ active = 37,
+ checked = 38,
+ disabled = 39,
+ enabled = 40,
+ focus = 41,
+ focusWithin = 42,
+ hover = 43,
+ mozDragOver = 44,
+ target = 45,
+ indeterminate = 46,
+ mozDevtoolsHighlighted = 47,
+ mozStyleeditorTransitioning = 48,
+ fullscreen = 49,
+ mozFullScreen = 50,
+ mozFocusRing = 51,
+ mozBroken = 52,
+ mozLoading = 53,
+ mozUserDisabled = 54,
+ mozSuppressed = 55,
+ mozHandlerClickToPlay = 56,
+ mozHandlerVulnerableUpdatable = 57,
+ mozHandlerVulnerableNoUpdate = 58,
+ mozHandlerDisabled = 59,
+ mozHandlerBlocked = 60,
+ mozHandlerCrashed = 61,
+ mozMathIncrementScriptLevel = 62,
+ required = 63,
+ optional = 64,
+ valid = 65,
+ invalid = 66,
+ inRange = 67,
+ outOfRange = 68,
+ defaultPseudo = 69,
+ placeholderShown = 70,
+ mozReadOnly = 71,
+ mozReadWrite = 72,
+ mozSubmitInvalid = 73,
+ mozUIInvalid = 74,
+ mozUIValid = 75,
+ mozMeterOptimum = 76,
+ mozMeterSubOptimum = 77,
+ mozMeterSubSubOptimum = 78,
+ mozPlaceholder = 79,
+ Count = 80,
+ NotPseudo = 81,
+ MAX = 82,
+ }
#[repr(C)]
#[derive(Debug)]
pub struct CSSVariableValues {
@@ -3088,23 +3177,6 @@ pub mod root {
assert_eq!(::std::mem::size_of::<CSSVariableValues>() , 56usize);
assert_eq!(::std::mem::align_of::<CSSVariableValues>() , 8usize);
}
- #[repr(C)]
- pub struct CounterStyle__bindgen_vtable {
- }
- #[repr(C)]
- #[derive(Debug, Copy)]
- pub struct CounterStyle {
- pub vtable_: *const CounterStyle__bindgen_vtable,
- pub mStyle: i32,
- }
- #[test]
- fn bindgen_test_layout_CounterStyle() {
- assert_eq!(::std::mem::size_of::<CounterStyle>() , 16usize);
- assert_eq!(::std::mem::align_of::<CounterStyle>() , 8usize);
- }
- impl Clone for CounterStyle {
- fn clone(&self) -> Self { *self }
- }
pub mod image {
#[allow(unused_imports)]
use self::super::super::super::root;
@@ -3162,6 +3234,23 @@ pub mod root {
}
}
#[repr(C)]
+ pub struct CounterStyle__bindgen_vtable {
+ }
+ #[repr(C)]
+ #[derive(Debug, Copy)]
+ pub struct CounterStyle {
+ pub vtable_: *const CounterStyle__bindgen_vtable,
+ pub mStyle: i32,
+ }
+ #[test]
+ fn bindgen_test_layout_CounterStyle() {
+ assert_eq!(::std::mem::size_of::<CounterStyle>() , 16usize);
+ assert_eq!(::std::mem::align_of::<CounterStyle>() , 8usize);
+ }
+ impl Clone for CounterStyle {
+ fn clone(&self) -> Self { *self }
+ }
+ #[repr(C)]
#[derive(Debug, Copy)]
pub struct Position {
pub mXPosition: root::mozilla::Position_Coord,
@@ -3243,93 +3332,6 @@ pub mod root {
root::mozilla::StyleShapeSource<root::mozilla::StyleGeometryBox>;
pub type StyleShapeOutside =
root::mozilla::StyleShapeSource<root::mozilla::StyleShapeOutsideShapeBox>;
- #[repr(u8)]
- #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
- pub enum CSSPseudoClassType {
- empty = 0,
- mozOnlyWhitespace = 1,
- mozEmptyExceptChildrenWithLocalname = 2,
- lang = 3,
- mozBoundElement = 4,
- root = 5,
- any = 6,
- firstChild = 7,
- firstNode = 8,
- lastChild = 9,
- lastNode = 10,
- onlyChild = 11,
- firstOfType = 12,
- lastOfType = 13,
- onlyOfType = 14,
- nthChild = 15,
- nthLastChild = 16,
- nthOfType = 17,
- nthLastOfType = 18,
- mozIsHTML = 19,
- unresolved = 20,
- mozNativeAnonymous = 21,
- mozSystemMetric = 22,
- mozLocaleDir = 23,
- mozLWTheme = 24,
- mozLWThemeBrightText = 25,
- mozLWThemeDarkText = 26,
- mozWindowInactive = 27,
- mozTableBorderNonzero = 28,
- mozBrowserFrame = 29,
- scope = 30,
- negation = 31,
- dir = 32,
- link = 33,
- mozAnyLink = 34,
- anyLink = 35,
- visited = 36,
- active = 37,
- checked = 38,
- disabled = 39,
- enabled = 40,
- focus = 41,
- focusWithin = 42,
- hover = 43,
- mozDragOver = 44,
- target = 45,
- indeterminate = 46,
- mozDevtoolsHighlighted = 47,
- mozStyleeditorTransitioning = 48,
- fullscreen = 49,
- mozFullScreen = 50,
- mozFocusRing = 51,
- mozBroken = 52,
- mozLoading = 53,
- mozUserDisabled = 54,
- mozSuppressed = 55,
- mozHandlerClickToPlay = 56,
- mozHandlerVulnerableUpdatable = 57,
- mozHandlerVulnerableNoUpdate = 58,
- mozHandlerDisabled = 59,
- mozHandlerBlocked = 60,
- mozHandlerCrashed = 61,
- mozMathIncrementScriptLevel = 62,
- required = 63,
- optional = 64,
- valid = 65,
- invalid = 66,
- inRange = 67,
- outOfRange = 68,
- defaultPseudo = 69,
- placeholderShown = 70,
- mozReadOnly = 71,
- mozReadWrite = 72,
- mozSubmitInvalid = 73,
- mozUIInvalid = 74,
- mozUIValid = 75,
- mozMeterOptimum = 76,
- mozMeterSubOptimum = 77,
- mozMeterSubSubOptimum = 78,
- mozPlaceholder = 79,
- Count = 80,
- NotPseudo = 81,
- MAX = 82,
- }
#[test]
fn __bindgen_test_layout_template_2() {
assert_eq!(::std::mem::size_of::<root::mozilla::StyleShapeSource<root::mozilla::StyleGeometryBox>>()
@@ -4384,6 +4386,7 @@ pub mod root {
pub struct nsCycleCollectionParticipant {
pub vtable_: *const nsCycleCollectionParticipant__bindgen_vtable,
pub mMightSkip: bool,
+ pub mTraverseShouldTrace: bool,
}
#[test]
fn bindgen_test_layout_nsCycleCollectionParticipant() {
@@ -4861,8 +4864,7 @@ pub mod root {
* causes between the native object and the JS object, so it is important that
* any native object that supports preserving of its wrapper
* traces/traverses/unlinks the cached JS object (see
- * NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER,
- * NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS and
+ * NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER and
* NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER).
*/
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -5063,7 +5065,7 @@ pub mod root {
pub mStates: root::EventStates_InternalType,
}
pub type EventStates_InternalType = u64;
- pub type EventStates_ServoType = u8;
+ pub type EventStates_ServoType = u16;
#[test]
fn bindgen_test_layout_EventStates() {
assert_eq!(::std::mem::size_of::<EventStates>() , 8usize);
@@ -6540,6 +6542,12 @@ pub mod root {
pub mFramesConstructed: u64,
pub mFramesReflowed: u64,
pub mReflowStartTime: root::mozilla::TimeStamp,
+ pub mFirstPaintTime: root::mozilla::TimeStamp,
+ pub mFirstClickTime: root::mozilla::TimeStamp,
+ pub mFirstKeyTime: root::mozilla::TimeStamp,
+ pub mFirstMouseMoveTime: root::mozilla::TimeStamp,
+ pub mFirstScrollTime: root::mozilla::TimeStamp,
+ pub mInteractionTimeEnabled: bool,
pub mLastStyleUpdateForAllAnimations: root::mozilla::TimeStamp,
pub _bitfield_1: u64,
pub mRestyleLoggingEnabled: bool,
@@ -6576,6 +6584,14 @@ pub mod root {
eContext_Print = 2,
eContext_PageLayout = 3,
}
+ #[repr(u32)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum nsPresContext_InteractionType {
+ eClickInteraction = 0,
+ eKeyInteraction = 1,
+ eMouseMoveInteraction = 2,
+ eScrollInteraction = 3,
+ }
/**
* A class that can be used to temporarily disable reflow interruption.
*/
@@ -6600,7 +6616,7 @@ pub mod root {
}
#[test]
fn bindgen_test_layout_nsPresContext() {
- assert_eq!(::std::mem::size_of::<nsPresContext>() , 1248usize);
+ assert_eq!(::std::mem::size_of::<nsPresContext>() , 1296usize);
assert_eq!(::std::mem::align_of::<nsPresContext>() , 8usize);
}
impl nsPresContext {
@@ -7950,6 +7966,14 @@ pub mod root {
*mut ::std::os::raw::c_void)
-> ::std::os::raw::c_int>;
#[repr(C)]
+ #[derive(Debug, Copy)]
+ pub struct nsMediaList {
+ pub _address: u8,
+ }
+ impl Clone for nsMediaList {
+ fn clone(&self) -> Self { *self }
+ }
+ #[repr(C)]
#[derive(Debug)]
pub struct nsAttrValue {
pub mBits: usize,
@@ -9220,10 +9244,6 @@ pub mod root {
impl Clone for nsFrameManagerBase_UndisplayedMap {
fn clone(&self) -> Self { *self }
}
- extern "C" {
- #[link_name = "_ZN18nsFrameManagerBase23sGlobalGenerationNumberE"]
- pub static mut nsFrameManagerBase_sGlobalGenerationNumber: u32;
- }
#[test]
fn bindgen_test_layout_nsFrameManagerBase() {
assert_eq!(::std::mem::size_of::<nsFrameManagerBase>() , 88usize);
@@ -11661,14 +11681,14 @@ pub mod root {
pub type RawGeckoNode = root::nsINode;
pub type RawGeckoElement = root::mozilla::dom::Element;
pub type RawGeckoDocument = root::nsIDocument;
- pub type RawGeckoPresContext = [u64; 156usize];
+ pub type RawGeckoPresContext = [u64; 162usize];
pub type RawGeckoNodeBorrowed = *const root::RawGeckoNode;
pub type RawGeckoNodeBorrowedOrNull = *const root::RawGeckoNode;
pub type RawGeckoElementBorrowed = *const root::RawGeckoElement;
pub type RawGeckoElementBorrowedOrNull = *const root::RawGeckoElement;
pub type RawGeckoDocumentBorrowed = *const root::RawGeckoDocument;
pub type RawGeckoDocumentBorrowedOrNull = *const root::RawGeckoDocument;
- pub type RawGeckoPresContextBorrowed = *const [u64; 156usize];
+ pub type RawGeckoPresContextBorrowed = *const [u64; 162usize];
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum nsCSSTokenSerializationType {
@@ -11700,20 +11720,6 @@ pub mod root {
}
#[repr(C)]
#[derive(Debug, Copy)]
- pub struct LookAndFeelInt {
- pub id: i32,
- pub value: i32,
- }
- #[test]
- fn bindgen_test_layout_LookAndFeelInt() {
- assert_eq!(::std::mem::size_of::<LookAndFeelInt>() , 8usize);
- assert_eq!(::std::mem::align_of::<LookAndFeelInt>() , 4usize);
- }
- impl Clone for LookAndFeelInt {
- fn clone(&self) -> Self { *self }
- }
- #[repr(C)]
- #[derive(Debug, Copy)]
pub struct imgIContainer {
pub _address: u8,
}
@@ -12540,7 +12546,7 @@ pub mod root {
pub mWhiteSpace: u8,
pub mWordBreak: u8,
pub mOverflowWrap: u8,
- pub mHyphens: u8,
+ pub mHyphens: root::mozilla::StyleHyphens,
pub mRubyAlign: u8,
pub mRubyPosition: u8,
pub mTextSizeAdjust: u8,
diff --git a/components/style/gecko_bindings/structs_release.rs b/components/style/gecko_bindings/structs_release.rs
index 00602c8f9c2..45058c648c6 100644
--- a/components/style/gecko_bindings/structs_release.rs
+++ b/components/style/gecko_bindings/structs_release.rs
@@ -226,14 +226,6 @@ pub mod root {
250;
pub const NS_FONT_VARIANT_NORMAL: ::std::os::raw::c_uint = 0;
pub const NS_FONT_VARIANT_SMALL_CAPS: ::std::os::raw::c_uint = 1;
- pub const NS_CORNER_TOP_LEFT_X: ::std::os::raw::c_uint = 0;
- pub const NS_CORNER_TOP_LEFT_Y: ::std::os::raw::c_uint = 1;
- pub const NS_CORNER_TOP_RIGHT_X: ::std::os::raw::c_uint = 2;
- pub const NS_CORNER_TOP_RIGHT_Y: ::std::os::raw::c_uint = 3;
- pub const NS_CORNER_BOTTOM_RIGHT_X: ::std::os::raw::c_uint = 4;
- pub const NS_CORNER_BOTTOM_RIGHT_Y: ::std::os::raw::c_uint = 5;
- pub const NS_CORNER_BOTTOM_LEFT_X: ::std::os::raw::c_uint = 6;
- pub const NS_CORNER_BOTTOM_LEFT_Y: ::std::os::raw::c_uint = 7;
pub const NS_STYLE_STACK_SIZING_IGNORE: ::std::os::raw::c_uint = 0;
pub const NS_STYLE_STACK_SIZING_STRETCH_TO_FIT: ::std::os::raw::c_uint =
1;
@@ -761,9 +753,6 @@ pub mod root {
pub const NS_STYLE_WORDBREAK_KEEP_ALL: ::std::os::raw::c_uint = 2;
pub const NS_STYLE_OVERFLOWWRAP_NORMAL: ::std::os::raw::c_uint = 0;
pub const NS_STYLE_OVERFLOWWRAP_BREAK_WORD: ::std::os::raw::c_uint = 1;
- pub const NS_STYLE_HYPHENS_NONE: ::std::os::raw::c_uint = 0;
- pub const NS_STYLE_HYPHENS_MANUAL: ::std::os::raw::c_uint = 1;
- pub const NS_STYLE_HYPHENS_AUTO: ::std::os::raw::c_uint = 2;
pub const NS_STYLE_RUBY_ALIGN_START: ::std::os::raw::c_uint = 0;
pub const NS_STYLE_RUBY_ALIGN_CENTER: ::std::os::raw::c_uint = 1;
pub const NS_STYLE_RUBY_ALIGN_SPACE_BETWEEN: ::std::os::raw::c_uint = 2;
@@ -1367,9 +1356,6 @@ pub mod root {
impl Clone for AudioContext {
fn clone(&self) -> Self { *self }
}
- pub const ReferrerPolicy_RP_Default:
- root::mozilla::dom::ReferrerPolicy =
- ReferrerPolicy::RP_No_Referrer_When_Downgrade;
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferrerPolicy {
@@ -2315,6 +2301,7 @@ pub mod root {
pub mTitle: ::nsstring::nsStringRepr,
pub mDocument: *mut root::nsIDocument,
pub mOwningNode: *mut root::nsINode,
+ pub mMedia: root::RefPtr<root::nsMediaList>,
pub mParsingMode: root::mozilla::css::SheetParsingMode,
pub mType: root::StyleBackendType,
pub mDisabled: bool,
@@ -2342,7 +2329,7 @@ pub mod root {
}
#[test]
fn bindgen_test_layout_StyleSheet() {
- assert_eq!(::std::mem::size_of::<StyleSheet>() , 80usize);
+ assert_eq!(::std::mem::size_of::<StyleSheet>() , 88usize);
assert_eq!(::std::mem::align_of::<StyleSheet>() , 8usize);
}
#[repr(C)]
@@ -2361,6 +2348,18 @@ pub mod root {
eSideBottom = 2,
eSideLeft = 3,
}
+ #[repr(u32)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum HalfCorner {
+ eCornerTopLeftX = 0,
+ eCornerTopLeftY = 1,
+ eCornerTopRightX = 2,
+ eCornerTopRightY = 3,
+ eCornerBottomRightX = 4,
+ eCornerBottomRightY = 5,
+ eCornerBottomLeftX = 6,
+ eCornerBottomLeftY = 7,
+ }
#[repr(C)]
#[derive(Debug, Copy)]
pub struct SVGAttrAnimationRuleProcessor {
@@ -2740,6 +2739,9 @@ pub mod root {
pub enum StyleFloatEdge { ContentBox = 0, MarginBox = 1, }
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum StyleHyphens { None = 0, Manual = 1, Auto = 2, }
+ #[repr(u8)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum StyleShapeOutsideShapeBox {
NoBox = 0,
Content = 1,
@@ -3037,6 +3039,93 @@ pub mod root {
assert_eq!(::std::mem::align_of::<root::mozilla::DefaultDelete<root::RawServoStyleSet>>()
, 1usize);
}
+ #[repr(u8)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum CSSPseudoClassType {
+ empty = 0,
+ mozOnlyWhitespace = 1,
+ mozEmptyExceptChildrenWithLocalname = 2,
+ lang = 3,
+ mozBoundElement = 4,
+ root = 5,
+ any = 6,
+ firstChild = 7,
+ firstNode = 8,
+ lastChild = 9,
+ lastNode = 10,
+ onlyChild = 11,
+ firstOfType = 12,
+ lastOfType = 13,
+ onlyOfType = 14,
+ nthChild = 15,
+ nthLastChild = 16,
+ nthOfType = 17,
+ nthLastOfType = 18,
+ mozIsHTML = 19,
+ unresolved = 20,
+ mozNativeAnonymous = 21,
+ mozSystemMetric = 22,
+ mozLocaleDir = 23,
+ mozLWTheme = 24,
+ mozLWThemeBrightText = 25,
+ mozLWThemeDarkText = 26,
+ mozWindowInactive = 27,
+ mozTableBorderNonzero = 28,
+ mozBrowserFrame = 29,
+ scope = 30,
+ negation = 31,
+ dir = 32,
+ link = 33,
+ mozAnyLink = 34,
+ anyLink = 35,
+ visited = 36,
+ active = 37,
+ checked = 38,
+ disabled = 39,
+ enabled = 40,
+ focus = 41,
+ focusWithin = 42,
+ hover = 43,
+ mozDragOver = 44,
+ target = 45,
+ indeterminate = 46,
+ mozDevtoolsHighlighted = 47,
+ mozStyleeditorTransitioning = 48,
+ fullscreen = 49,
+ mozFullScreen = 50,
+ mozFocusRing = 51,
+ mozBroken = 52,
+ mozLoading = 53,
+ mozUserDisabled = 54,
+ mozSuppressed = 55,
+ mozHandlerClickToPlay = 56,
+ mozHandlerVulnerableUpdatable = 57,
+ mozHandlerVulnerableNoUpdate = 58,
+ mozHandlerDisabled = 59,
+ mozHandlerBlocked = 60,
+ mozHandlerCrashed = 61,
+ mozMathIncrementScriptLevel = 62,
+ required = 63,
+ optional = 64,
+ valid = 65,
+ invalid = 66,
+ inRange = 67,
+ outOfRange = 68,
+ defaultPseudo = 69,
+ placeholderShown = 70,
+ mozReadOnly = 71,
+ mozReadWrite = 72,
+ mozSubmitInvalid = 73,
+ mozUIInvalid = 74,
+ mozUIValid = 75,
+ mozMeterOptimum = 76,
+ mozMeterSubOptimum = 77,
+ mozMeterSubSubOptimum = 78,
+ mozPlaceholder = 79,
+ Count = 80,
+ NotPseudo = 81,
+ MAX = 82,
+ }
#[repr(C)]
#[derive(Debug)]
pub struct CSSVariableValues {
@@ -3070,23 +3159,6 @@ pub mod root {
assert_eq!(::std::mem::size_of::<CSSVariableValues>() , 48usize);
assert_eq!(::std::mem::align_of::<CSSVariableValues>() , 8usize);
}
- #[repr(C)]
- pub struct CounterStyle__bindgen_vtable {
- }
- #[repr(C)]
- #[derive(Debug, Copy)]
- pub struct CounterStyle {
- pub vtable_: *const CounterStyle__bindgen_vtable,
- pub mStyle: i32,
- }
- #[test]
- fn bindgen_test_layout_CounterStyle() {
- assert_eq!(::std::mem::size_of::<CounterStyle>() , 16usize);
- assert_eq!(::std::mem::align_of::<CounterStyle>() , 8usize);
- }
- impl Clone for CounterStyle {
- fn clone(&self) -> Self { *self }
- }
pub mod image {
#[allow(unused_imports)]
use self::super::super::super::root;
@@ -3144,6 +3216,23 @@ pub mod root {
}
}
#[repr(C)]
+ pub struct CounterStyle__bindgen_vtable {
+ }
+ #[repr(C)]
+ #[derive(Debug, Copy)]
+ pub struct CounterStyle {
+ pub vtable_: *const CounterStyle__bindgen_vtable,
+ pub mStyle: i32,
+ }
+ #[test]
+ fn bindgen_test_layout_CounterStyle() {
+ assert_eq!(::std::mem::size_of::<CounterStyle>() , 16usize);
+ assert_eq!(::std::mem::align_of::<CounterStyle>() , 8usize);
+ }
+ impl Clone for CounterStyle {
+ fn clone(&self) -> Self { *self }
+ }
+ #[repr(C)]
#[derive(Debug, Copy)]
pub struct Position {
pub mXPosition: root::mozilla::Position_Coord,
@@ -3225,93 +3314,6 @@ pub mod root {
root::mozilla::StyleShapeSource<root::mozilla::StyleGeometryBox>;
pub type StyleShapeOutside =
root::mozilla::StyleShapeSource<root::mozilla::StyleShapeOutsideShapeBox>;
- #[repr(u8)]
- #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
- pub enum CSSPseudoClassType {
- empty = 0,
- mozOnlyWhitespace = 1,
- mozEmptyExceptChildrenWithLocalname = 2,
- lang = 3,
- mozBoundElement = 4,
- root = 5,
- any = 6,
- firstChild = 7,
- firstNode = 8,
- lastChild = 9,
- lastNode = 10,
- onlyChild = 11,
- firstOfType = 12,
- lastOfType = 13,
- onlyOfType = 14,
- nthChild = 15,
- nthLastChild = 16,
- nthOfType = 17,
- nthLastOfType = 18,
- mozIsHTML = 19,
- unresolved = 20,
- mozNativeAnonymous = 21,
- mozSystemMetric = 22,
- mozLocaleDir = 23,
- mozLWTheme = 24,
- mozLWThemeBrightText = 25,
- mozLWThemeDarkText = 26,
- mozWindowInactive = 27,
- mozTableBorderNonzero = 28,
- mozBrowserFrame = 29,
- scope = 30,
- negation = 31,
- dir = 32,
- link = 33,
- mozAnyLink = 34,
- anyLink = 35,
- visited = 36,
- active = 37,
- checked = 38,
- disabled = 39,
- enabled = 40,
- focus = 41,
- focusWithin = 42,
- hover = 43,
- mozDragOver = 44,
- target = 45,
- indeterminate = 46,
- mozDevtoolsHighlighted = 47,
- mozStyleeditorTransitioning = 48,
- fullscreen = 49,
- mozFullScreen = 50,
- mozFocusRing = 51,
- mozBroken = 52,
- mozLoading = 53,
- mozUserDisabled = 54,
- mozSuppressed = 55,
- mozHandlerClickToPlay = 56,
- mozHandlerVulnerableUpdatable = 57,
- mozHandlerVulnerableNoUpdate = 58,
- mozHandlerDisabled = 59,
- mozHandlerBlocked = 60,
- mozHandlerCrashed = 61,
- mozMathIncrementScriptLevel = 62,
- required = 63,
- optional = 64,
- valid = 65,
- invalid = 66,
- inRange = 67,
- outOfRange = 68,
- defaultPseudo = 69,
- placeholderShown = 70,
- mozReadOnly = 71,
- mozReadWrite = 72,
- mozSubmitInvalid = 73,
- mozUIInvalid = 74,
- mozUIValid = 75,
- mozMeterOptimum = 76,
- mozMeterSubOptimum = 77,
- mozMeterSubSubOptimum = 78,
- mozPlaceholder = 79,
- Count = 80,
- NotPseudo = 81,
- MAX = 82,
- }
#[test]
fn __bindgen_test_layout_template_2() {
assert_eq!(::std::mem::size_of::<root::mozilla::StyleShapeSource<root::mozilla::StyleGeometryBox>>()
@@ -4366,6 +4368,7 @@ pub mod root {
pub struct nsCycleCollectionParticipant {
pub vtable_: *const nsCycleCollectionParticipant__bindgen_vtable,
pub mMightSkip: bool,
+ pub mTraverseShouldTrace: bool,
}
#[test]
fn bindgen_test_layout_nsCycleCollectionParticipant() {
@@ -4831,8 +4834,7 @@ pub mod root {
* causes between the native object and the JS object, so it is important that
* any native object that supports preserving of its wrapper
* traces/traverses/unlinks the cached JS object (see
- * NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER,
- * NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS and
+ * NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER and
* NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER).
*/
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -5033,7 +5035,7 @@ pub mod root {
pub mStates: root::EventStates_InternalType,
}
pub type EventStates_InternalType = u64;
- pub type EventStates_ServoType = u8;
+ pub type EventStates_ServoType = u16;
#[test]
fn bindgen_test_layout_EventStates() {
assert_eq!(::std::mem::size_of::<EventStates>() , 8usize);
@@ -6504,6 +6506,12 @@ pub mod root {
pub mFramesConstructed: u64,
pub mFramesReflowed: u64,
pub mReflowStartTime: root::mozilla::TimeStamp,
+ pub mFirstPaintTime: root::mozilla::TimeStamp,
+ pub mFirstClickTime: root::mozilla::TimeStamp,
+ pub mFirstKeyTime: root::mozilla::TimeStamp,
+ pub mFirstMouseMoveTime: root::mozilla::TimeStamp,
+ pub mFirstScrollTime: root::mozilla::TimeStamp,
+ pub mInteractionTimeEnabled: bool,
pub mLastStyleUpdateForAllAnimations: root::mozilla::TimeStamp,
pub _bitfield_1: u64,
}
@@ -6537,6 +6545,14 @@ pub mod root {
eContext_Print = 2,
eContext_PageLayout = 3,
}
+ #[repr(u32)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub enum nsPresContext_InteractionType {
+ eClickInteraction = 0,
+ eKeyInteraction = 1,
+ eMouseMoveInteraction = 2,
+ eScrollInteraction = 3,
+ }
/**
* A class that can be used to temporarily disable reflow interruption.
*/
@@ -6561,7 +6577,7 @@ pub mod root {
}
#[test]
fn bindgen_test_layout_nsPresContext() {
- assert_eq!(::std::mem::size_of::<nsPresContext>() , 1216usize);
+ assert_eq!(::std::mem::size_of::<nsPresContext>() , 1264usize);
assert_eq!(::std::mem::align_of::<nsPresContext>() , 8usize);
}
impl nsPresContext {
@@ -7892,6 +7908,14 @@ pub mod root {
*mut ::std::os::raw::c_void)
-> ::std::os::raw::c_int>;
#[repr(C)]
+ #[derive(Debug, Copy)]
+ pub struct nsMediaList {
+ pub _address: u8,
+ }
+ impl Clone for nsMediaList {
+ fn clone(&self) -> Self { *self }
+ }
+ #[repr(C)]
#[derive(Debug)]
pub struct nsAttrValue {
pub mBits: usize,
@@ -9157,10 +9181,6 @@ pub mod root {
impl Clone for nsFrameManagerBase_UndisplayedMap {
fn clone(&self) -> Self { *self }
}
- extern "C" {
- #[link_name = "_ZN18nsFrameManagerBase23sGlobalGenerationNumberE"]
- pub static mut nsFrameManagerBase_sGlobalGenerationNumber: u32;
- }
#[test]
fn bindgen_test_layout_nsFrameManagerBase() {
assert_eq!(::std::mem::size_of::<nsFrameManagerBase>() , 80usize);
@@ -11588,14 +11608,14 @@ pub mod root {
pub type RawGeckoNode = root::nsINode;
pub type RawGeckoElement = root::mozilla::dom::Element;
pub type RawGeckoDocument = root::nsIDocument;
- pub type RawGeckoPresContext = [u64; 152usize];
+ pub type RawGeckoPresContext = [u64; 158usize];
pub type RawGeckoNodeBorrowed = *const root::RawGeckoNode;
pub type RawGeckoNodeBorrowedOrNull = *const root::RawGeckoNode;
pub type RawGeckoElementBorrowed = *const root::RawGeckoElement;
pub type RawGeckoElementBorrowedOrNull = *const root::RawGeckoElement;
pub type RawGeckoDocumentBorrowed = *const root::RawGeckoDocument;
pub type RawGeckoDocumentBorrowedOrNull = *const root::RawGeckoDocument;
- pub type RawGeckoPresContextBorrowed = *const [u64; 152usize];
+ pub type RawGeckoPresContextBorrowed = *const [u64; 158usize];
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum nsCSSTokenSerializationType {
@@ -11627,20 +11647,6 @@ pub mod root {
}
#[repr(C)]
#[derive(Debug, Copy)]
- pub struct LookAndFeelInt {
- pub id: i32,
- pub value: i32,
- }
- #[test]
- fn bindgen_test_layout_LookAndFeelInt() {
- assert_eq!(::std::mem::size_of::<LookAndFeelInt>() , 8usize);
- assert_eq!(::std::mem::align_of::<LookAndFeelInt>() , 4usize);
- }
- impl Clone for LookAndFeelInt {
- fn clone(&self) -> Self { *self }
- }
- #[repr(C)]
- #[derive(Debug, Copy)]
pub struct imgIContainer {
pub _address: u8,
}
@@ -12467,7 +12473,7 @@ pub mod root {
pub mWhiteSpace: u8,
pub mWordBreak: u8,
pub mOverflowWrap: u8,
- pub mHyphens: u8,
+ pub mHyphens: root::mozilla::StyleHyphens,
pub mRubyAlign: u8,
pub mRubyPosition: u8,
pub mTextSizeAdjust: u8,
diff --git a/components/style/properties/gecko.mako.rs b/components/style/properties/gecko.mako.rs
index 56c8ce5cfc3..60bd2ddbdb4 100644
--- a/components/style/properties/gecko.mako.rs
+++ b/components/style/properties/gecko.mako.rs
@@ -596,10 +596,10 @@ class Side(object):
self.index = index
class Corner(object):
- def __init__(self, name, index):
- self.x_name = "NS_CORNER_" + name + "_X"
- self.y_name = "NS_CORNER_" + name + "_Y"
- self.ident = name.lower()
+ def __init__(self, vert, horiz, index):
+ self.x_name = "HalfCorner::eCorner" + vert + horiz + "X"
+ self.y_name = "HalfCorner::eCorner" + vert + horiz + "Y"
+ self.ident = (vert + "_" + horiz).lower()
self.x_index = 2 * index
self.y_index = 2 * index + 1
@@ -610,7 +610,8 @@ class GridLine(object):
self.gecko = "m" + to_camel_case(self.ident)
SIDES = [Side("Top", 0), Side("Right", 1), Side("Bottom", 2), Side("Left", 3)]
-CORNERS = [Corner("TOP_LEFT", 0), Corner("TOP_RIGHT", 1), Corner("BOTTOM_RIGHT", 2), Corner("BOTTOM_LEFT", 3)]
+CORNERS = [Corner("Top", "Left", 0), Corner("Top", "Right", 1),
+ Corner("Bottom", "Right", 2), Corner("Bottom", "Left", 3)]
GRID_LINES = map(GridLine, ["row-start", "row-end", "column-start", "column-end"])
%>
diff --git a/components/style/properties/longhand/inherited_text.mako.rs b/components/style/properties/longhand/inherited_text.mako.rs
index e40d8e31e3f..7e5881ee8db 100644
--- a/components/style/properties/longhand/inherited_text.mako.rs
+++ b/components/style/properties/longhand/inherited_text.mako.rs
@@ -155,6 +155,7 @@ ${helpers.single_keyword("text-transform",
spec="https://drafts.csswg.org/css-text/#propdef-text-transform")}
${helpers.single_keyword("hyphens", "none manual auto",
+ gecko_enum_prefix="StyleHyphens",
products="gecko", animatable=False,
spec="https://drafts.csswg.org/css-text/#propdef-hyphens")}
diff --git a/components/style/stylist.rs b/components/style/stylist.rs
index 9c3608ef0c0..0f147f0be9b 100644
--- a/components/style/stylist.rs
+++ b/components/style/stylist.rs
@@ -276,32 +276,33 @@ impl Stylist {
parent: Option<&Arc<ComputedValues>>,
default: &Arc<ComputedValues>,
inherit_all: bool)
- -> Option<ComputedStyle> {
+ -> ComputedStyle {
debug_assert!(SelectorImpl::pseudo_element_cascade_type(pseudo).is_precomputed());
- if let Some(declarations) = self.precomputed_pseudo_element_decls.get(pseudo) {
- // FIXME(emilio): When we've taken rid of the cascade we can just
- // use into_iter.
- let rule_node =
- self.rule_tree.insert_ordered_rules(
- declarations.into_iter().map(|a| (a.source.clone(), a.importance)));
- let mut flags = CascadeFlags::empty();
- if inherit_all {
- flags.insert(INHERIT_ALL)
+ let rule_node = match self.precomputed_pseudo_element_decls.get(pseudo) {
+ Some(declarations) => {
+                // FIXME(emilio): When we've gotten rid of the cascade we can just
+ // use into_iter.
+ self.rule_tree.insert_ordered_rules(
+ declarations.into_iter().map(|a| (a.source.clone(), a.importance)))
}
+ None => self.rule_tree.root(),
+ };
- let computed =
- properties::cascade(self.device.au_viewport_size(),
- &rule_node,
- parent.map(|p| &**p),
- default,
- None,
- Box::new(StdoutErrorReporter),
- flags);
- Some(ComputedStyle::new(rule_node, Arc::new(computed)))
- } else {
- parent.map(|p| ComputedStyle::new(self.rule_tree.root(), p.clone()))
+ let mut flags = CascadeFlags::empty();
+ if inherit_all {
+ flags.insert(INHERIT_ALL)
}
+
+ let computed =
+ properties::cascade(self.device.au_viewport_size(),
+ &rule_node,
+ parent.map(|p| &**p),
+ default,
+ None,
+ Box::new(StdoutErrorReporter),
+ flags);
+ ComputedStyle::new(rule_node, Arc::new(computed))
}
/// Returns the style for an anonymous box of the given type.
@@ -329,7 +330,6 @@ impl Stylist {
}
};
self.precomputed_values_for_pseudo(&pseudo, Some(parent_style), default_style, inherit_all)
- .expect("style_for_anonymous_box(): No precomputed values for that pseudo!")
.values
}
diff --git a/components/webvr/Cargo.toml b/components/webvr/Cargo.toml
new file mode 100644
index 00000000000..35d9d007c65
--- /dev/null
+++ b/components/webvr/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "webvr"
+version = "0.0.1"
+authors = ["The Servo Project Developers"]
+license = "MPL-2.0"
+publish = false
+
+[lib]
+name = "webvr"
+path = "lib.rs"
+
+[dependencies]
+ipc-channel = "0.5"
+log = "0.3"
+msg = {path = "../msg"}
+script_traits = {path = "../script_traits"}
+servo_config = {path = "../config"}
+webvr_traits = {path = "../webvr_traits" }
+
+[dependencies.webrender_traits]
+git = "https://github.com/servo/webrender"
+default_features = false
+features = ["serde_derive"]
diff --git a/components/webvr/lib.rs b/components/webvr/lib.rs
new file mode 100644
index 00000000000..941f9a59a6d
--- /dev/null
+++ b/components/webvr/lib.rs
@@ -0,0 +1,19 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#![feature(custom_derive)]
+#![feature(plugin)]
+#![deny(unsafe_code)]
+
+extern crate ipc_channel;
+#[macro_use]
+extern crate log;
+extern crate msg;
+extern crate script_traits;
+extern crate servo_config;
+extern crate webrender_traits;
+extern crate webvr_traits;
+
+mod webvr_thread;
+pub use webvr_thread::{WebVRThread, WebVRCompositorHandler};
diff --git a/components/webvr/webvr_thread.rs b/components/webvr/webvr_thread.rs
new file mode 100644
index 00000000000..5be84dd12c1
--- /dev/null
+++ b/components/webvr/webvr_thread.rs
@@ -0,0 +1,377 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use ipc_channel::ipc;
+use ipc_channel::ipc::{IpcReceiver, IpcSender};
+use msg::constellation_msg::PipelineId;
+use script_traits::{ConstellationMsg, WebVREventMsg};
+use servo_config::prefs::PREFS;
+use std::{thread, time};
+use std::collections::{HashMap, HashSet};
+use std::sync::mpsc;
+use std::sync::mpsc::{Receiver, Sender};
+use webrender_traits;
+use webvr_traits::{WebVRMsg, WebVRResult};
+use webvr_traits::webvr::*;
+
+/// WebVRThread owns the native VRDisplays, handles their life cycle inside Servo and
+/// acts as a doorman for untrusted VR requests from DOM objects. These are the key components:
+/// * WebVRThread::spawn() creates a long-lived thread that waits for VR commands from DOM objects
+/// and handles them in its trusted thread. The back-and-forth communication with the DOM is implemented
+/// using IPC channels. This thread creates the VRServiceManager instance, which handles the life cycle
+/// of all VR vendor SDKs and owns all the native VRDisplays. These displays are guaranteed to live while
+/// the spawned thread is alive. The WebVRThread is unique and is closed using the Exit message when the
+/// whole browser is about to be closed.
+/// * An event polling thread is created in order to implement WebVR events (connected, disconnected, ...).
+/// This thread wakes up the WebVRThread from time to time by sending a PollEvents message. It
+/// is only created when there is at least one live JavaScript context using the WebVR APIs and shuts down when
+/// the tab is closed. A single instance of the thread is used to handle multiple JavaScript contexts.
+/// The constellation channel is used to notify events to the script thread.
+/// * When the WebVR APIs are used in a tab, its pipeline_id is registered using the RegisterContext message. When
+/// the tab is closed, an UnregisterContext message is sent. This way the WebVR thread has a list of the pipeline
+/// ids using the WebVR APIs. These ids are used to implement the privacy guidelines defined in the WebVR spec.
+/// * When a JavaScript thread gains access to present to a headset, the WebVRThread is not used as an intermediary
+/// in the VRDisplay.requestAnimationFrame loop, in order to minimize latency. Direct communication with WebRender
+/// is used instead. See WebVRCompositorHandler and VRCompositorCommand for more details.
+pub struct WebVRThread {
+ receiver: IpcReceiver<WebVRMsg>,
+ sender: IpcSender<WebVRMsg>,
+ service: VRServiceManager,
+ contexts: HashSet<PipelineId>,
+ constellation_chan: Sender<ConstellationMsg>,
+ vr_compositor_chan: WebVRCompositorSender,
+ polling_events: bool,
+ presenting: HashMap<u64, PipelineId>
+}
+
+impl WebVRThread {
+ fn new(receiver: IpcReceiver<WebVRMsg>,
+ sender: IpcSender<WebVRMsg>,
+ constellation_chan: Sender<ConstellationMsg>,
+ vr_compositor_chan: WebVRCompositorSender)
+ -> WebVRThread {
+ let mut service = VRServiceManager::new();
+ service.register_defaults();
+ WebVRThread {
+ receiver: receiver,
+ sender: sender,
+ service: service,
+ contexts: HashSet::new(),
+ constellation_chan: constellation_chan,
+ vr_compositor_chan: vr_compositor_chan,
+ polling_events: false,
+ presenting: HashMap::new()
+ }
+ }
+
+ pub fn spawn(constellation_chan: Sender<ConstellationMsg>,
+ vr_compositor_chan: WebVRCompositorSender)
+ -> IpcSender<WebVRMsg> {
+ let (sender, receiver) = ipc::channel().unwrap();
+ let sender_clone = sender.clone();
+ thread::Builder::new().name("WebVRThread".into()).spawn(move || {
+ WebVRThread::new(receiver, sender_clone, constellation_chan, vr_compositor_chan).start();
+ }).expect("Thread spawning failed");
+ sender
+ }
+
+ fn start(&mut self) {
+ while let Ok(msg) = self.receiver.recv() {
+ match msg {
+ WebVRMsg::RegisterContext(context) => {
+ self.handle_register_context(context);
+ self.schedule_poll_events();
+ },
+ WebVRMsg::UnregisterContext(context) => {
+ self.handle_unregister_context(context);
+ },
+ WebVRMsg::PollEvents(sender) => {
+ self.poll_events(sender);
+ },
+ WebVRMsg::GetDisplays(sender) => {
+ self.handle_get_displays(sender);
+ self.schedule_poll_events();
+ },
+ WebVRMsg::GetFrameData(pipeline_id, display_id, near, far, sender) => {
+ self.handle_framedata(pipeline_id, display_id, near, far, sender);
+ },
+ WebVRMsg::ResetPose(pipeline_id, display_id, sender) => {
+ self.handle_reset_pose(pipeline_id, display_id, sender);
+ },
+ WebVRMsg::RequestPresent(pipeline_id, display_id, sender) => {
+ self.handle_request_present(pipeline_id, display_id, sender);
+ },
+ WebVRMsg::ExitPresent(pipeline_id, display_id, sender) => {
+ self.handle_exit_present(pipeline_id, display_id, sender);
+ },
+ WebVRMsg::CreateCompositor(display_id) => {
+ self.handle_create_compositor(display_id);
+ },
+ WebVRMsg::Exit => {
+ break
+ },
+ }
+ }
+ }
+
+ fn handle_register_context(&mut self, ctx: PipelineId) {
+ self.contexts.insert(ctx);
+ }
+
+ fn handle_unregister_context(&mut self, ctx: PipelineId) {
+ self.contexts.remove(&ctx);
+ }
+
+ fn handle_get_displays(&mut self, sender: IpcSender<WebVRResult<Vec<VRDisplayData>>>) {
+ let displays = self.service.get_displays();
+ let mut result = Vec::new();
+ for display in displays {
+ result.push(display.borrow().data());
+ }
+ sender.send(Ok(result)).unwrap();
+ }
+
+ fn handle_framedata(&mut self,
+ pipeline: PipelineId,
+ display_id: u64,
+ near: f64,
+ far: f64,
+ sender: IpcSender<WebVRResult<VRFrameData>>) {
+ match self.access_check(pipeline, display_id) {
+ Ok(display) => {
+ sender.send(Ok(display.borrow().inmediate_frame_data(near, far))).unwrap()
+ },
+ Err(msg) => sender.send(Err(msg.into())).unwrap()
+ }
+ }
+
+ fn handle_reset_pose(&mut self,
+ pipeline: PipelineId,
+ display_id: u64,
+ sender: IpcSender<WebVRResult<VRDisplayData>>) {
+ match self.access_check(pipeline, display_id) {
+ Ok(display) => {
+ display.borrow_mut().reset_pose();
+ sender.send(Ok(display.borrow().data())).unwrap();
+ },
+ Err(msg) => {
+ sender.send(Err(msg.into())).unwrap()
+ }
+ }
+ }
+
+ // This method implements the privacy and security guidelines defined in the WebVR spec.
+ // For example, a secondary tab is not allowed to read VRDisplay data or stop a VR presentation
+ // while the user is having a VR experience in the current tab.
+ // These security rules also avoid multithreading race conditions between the WebVRThread and
+ // the WebRender thread. See the WebVRCompositorHandler implementation notes for more details.
+ fn access_check(&self, pipeline: PipelineId, display_id: u64) -> Result<&VRDisplayPtr, &'static str> {
+ if *self.presenting.get(&display_id).unwrap_or(&pipeline) != pipeline {
+ return Err("No access granted to this display because it is presenting in another JavaScript tab");
+ }
+ self.service.get_display(display_id).ok_or("Device not found")
+ }
+
+ fn handle_request_present(&mut self,
+ pipeline: PipelineId,
+ display_id: u64,
+ sender: IpcSender<WebVRResult<()>>) {
+ match self.access_check(pipeline, display_id).map(|d| d.clone()) {
+ Ok(display) => {
+ self.presenting.insert(display_id, pipeline);
+ let data = display.borrow().data();
+ sender.send(Ok(())).unwrap();
+ self.notify_event(VRDisplayEvent::PresentChange(data, true));
+ },
+ Err(msg) => {
+ sender.send(Err(msg.into())).unwrap();
+ }
+ }
+ }
+
+ fn handle_exit_present(&mut self,
+ pipeline: PipelineId,
+ display_id: u64,
+ sender: Option<IpcSender<WebVRResult<()>>>) {
+ match self.access_check(pipeline, display_id).map(|d| d.clone()) {
+ Ok(display) => {
+ self.presenting.remove(&display_id);
+ if let Some(sender) = sender {
+ sender.send(Ok(())).unwrap();
+ }
+ let data = display.borrow().data();
+ self.notify_event(VRDisplayEvent::PresentChange(data, false));
+ },
+ Err(msg) => {
+ if let Some(sender) = sender {
+ sender.send(Err(msg.into())).unwrap();
+ }
+ }
+ }
+ }
+
+ fn handle_create_compositor(&mut self, display_id: u64) {
+ let compositor = self.service.get_display(display_id).map(|d| WebVRCompositor(d.as_ptr()));
+ self.vr_compositor_chan.send(compositor).unwrap();
+ }
+
+ fn poll_events(&mut self, sender: IpcSender<bool>) {
+ loop {
+ let events = self.service.poll_events();
+ if events.is_empty() {
+ break;
+ }
+ self.notify_events(events)
+ }
+
+ // Stop polling for events if no JavaScript contexts are using the WebVR APIs
+ self.polling_events = self.contexts.len() > 0;
+ sender.send(self.polling_events).unwrap();
+ }
+
+ fn notify_events(&self, events: Vec<VRDisplayEvent>) {
+ let pipeline_ids: Vec<PipelineId> = self.contexts.iter().map(|c| *c).collect();
+ for event in events {
+ let event = WebVREventMsg::DisplayEvent(event);
+ self.constellation_chan.send(ConstellationMsg::WebVREvent(pipeline_ids.clone(), event)).unwrap();
+ }
+ }
+
+ #[inline]
+ fn notify_event(&self, event: VRDisplayEvent) {
+ self.notify_events(vec![event]);
+ }
+
+ fn schedule_poll_events(&mut self) {
+ if !self.service.is_initialized() || self.polling_events {
+ return;
+ }
+ self.polling_events = true;
+ let webvr_thread = self.sender.clone();
+ let (sender, receiver) = ipc::channel().unwrap();
+
+ // Defines the polling interval time in ms for VR Events such as VRDisplay connected, disconnected, etc.
+ let polling_interval: u64 = PREFS.get("dom.webvr.event_polling_interval").as_u64().unwrap_or(500);
+
+ thread::Builder::new().name("WebVRPollEvents".into()).spawn(move || {
+ loop {
+ if webvr_thread.send(WebVRMsg::PollEvents(sender.clone())).is_err() {
+ // WebVR Thread closed
+ break;
+ }
+ if !receiver.recv().unwrap_or(false) {
+ // WebVR Thread asked to unschedule this thread
+ break;
+ }
+ thread::sleep(time::Duration::from_millis(polling_interval));
+ }
+ }).expect("Thread spawning failed");
+ }
+}
+
+/// Notes about the WebVRCompositorHandler implementation:
+/// Raw pointers are used instead of Arc<Mutex> as a heavy optimization for latency reasons.
+/// This also avoids "JS DDoS" attacks, such as a secondary JavaScript tab degrading performance
+/// by flooding the WebVRThread with messages while the main JavaScript tab is presenting to the headset.
+/// Multithreading won't be a problem because:
+/// * Thanks to the security rules implemented in the WebVRThread, when a VRDisplay is in a presenting loop
+/// no other JSContext is granted access to the VRDisplay, so there are no multithreading race conditions.
+/// * VRDisplay implementations are designed to allow calling compositor functions
+/// from another thread by using the Send + Sync traits.
+/// VRDisplay pointers are guaranteed to point to valid memory:
+/// * VRDisplays are owned by the VRServiceManager, which lives in the WebVRThread.
+/// * The WebVRCompositorHandler is stopped automatically when a JS tab is closed or the whole browser is closed.
+/// * The WebVRThread and its VRDisplays are destroyed after all tabs are dropped and the browser is about to exit.
+/// The WebVRThread is closed using the Exit message.
+
+pub struct WebVRCompositor(*mut VRDisplay);
+pub struct WebVRCompositorHandler {
+ compositors: HashMap<webrender_traits::VRCompositorId, WebVRCompositor>,
+ webvr_thread_receiver: Receiver<Option<WebVRCompositor>>,
+ webvr_thread_sender: Option<IpcSender<WebVRMsg>>
+}
+
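+// Safety: the unsafe Send impl below is justified by the notes above. The raw VRDisplay pointer
+// stays valid for the whole lifetime of the WebVRThread, and the presenting rules guarantee that
+// only a single context drives a given display at a time.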
+#[allow(unsafe_code)]
+unsafe impl Send for WebVRCompositor {}
+
+pub type WebVRCompositorSender = Sender<Option<WebVRCompositor>>;
+
+impl WebVRCompositorHandler {
+ pub fn new() -> (Box<WebVRCompositorHandler>, WebVRCompositorSender) {
+ let (sender, receiver) = mpsc::channel();
+ let instance = Box::new(WebVRCompositorHandler {
+ compositors: HashMap::new(),
+ webvr_thread_receiver: receiver,
+ webvr_thread_sender: None
+ });
+
+ (instance, sender)
+ }
+}
+
+impl webrender_traits::VRCompositorHandler for WebVRCompositorHandler {
+ #[allow(unsafe_code)]
+ fn handle(&mut self, cmd: webrender_traits::VRCompositorCommand, texture_id: Option<u32>) {
+ match cmd {
+ webrender_traits::VRCompositorCommand::Create(compositor_id) => {
+ self.create_compositor(compositor_id);
+ }
+ webrender_traits::VRCompositorCommand::SyncPoses(compositor_id, near, far, sender) => {
+ if let Some(compositor) = self.compositors.get(&compositor_id) {
+ let pose = unsafe {
+ (*compositor.0).sync_poses();
+ (*compositor.0).synced_frame_data(near, far).to_bytes()
+ };
+ let _ = sender.send(Ok(pose));
+ } else {
+ let _ = sender.send(Err(()));
+ }
+ }
+ webrender_traits::VRCompositorCommand::SubmitFrame(compositor_id, left_bounds, right_bounds) => {
+ if let Some(compositor) = self.compositors.get(&compositor_id) {
+ if let Some(texture_id) = texture_id {
+ let layer = VRLayer {
+ texture_id: texture_id,
+ left_bounds: left_bounds,
+ right_bounds: right_bounds
+ };
+ unsafe {
+ (*compositor.0).submit_frame(&layer);
+ }
+ }
+ }
+ }
+ webrender_traits::VRCompositorCommand::Release(compositor_id) => {
+ self.compositors.remove(&compositor_id);
+ }
+ }
+ }
+}
+
+impl WebVRCompositorHandler {
+ #[allow(unsafe_code)]
+ fn create_compositor(&mut self, display_id: webrender_traits::VRCompositorId) {
+ let sender = match self.webvr_thread_sender {
+ Some(ref s) => s,
+ None => return,
+ };
+
+ sender.send(WebVRMsg::CreateCompositor(display_id)).unwrap();
+ let display = self.webvr_thread_receiver.recv().unwrap();
+
+ match display {
+ Some(display) => {
+ self.compositors.insert(display_id, display);
+ },
+ None => {
+ error!("VRDisplay not found when creating a new VRCompositor");
+ }
+ };
+ }
+
+ // This is only done on a per-platform basis during initialization.
+ pub fn set_webvr_thread_sender(&mut self, sender: IpcSender<WebVRMsg>) {
+ self.webvr_thread_sender = Some(sender);
+ }
+}
diff --git a/components/webvr_traits/Cargo.toml b/components/webvr_traits/Cargo.toml
new file mode 100644
index 00000000000..30467847c95
--- /dev/null
+++ b/components/webvr_traits/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "webvr_traits"
+version = "0.0.1"
+authors = ["The Servo Project Developers"]
+license = "MPL-2.0"
+publish = false
+
+[lib]
+name = "webvr_traits"
+path = "lib.rs"
+
+[dependencies]
+ipc-channel = "0.5"
+msg = {path = "../msg"}
+serde = "0.8"
+serde_derive = "0.8"
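+# The serde-serialization feature enables (de)serialization of the rust-webvr data types so they can be sent across IPC channels.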
+rust-webvr = {version = "0.1", features = ["serde-serialization"]}
diff --git a/components/webvr_traits/lib.rs b/components/webvr_traits/lib.rs
new file mode 100644
index 00000000000..a64da241525
--- /dev/null
+++ b/components/webvr_traits/lib.rs
@@ -0,0 +1,29 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#![feature(custom_derive)]
+#![feature(plugin)]
+#![deny(unsafe_code)]
+
+extern crate ipc_channel;
+extern crate msg;
+extern crate serde;
+#[macro_use]
+extern crate serde_derive;
+pub extern crate rust_webvr as webvr;
+
+mod webvr_traits;
+
+pub use webvr::VRDisplayData as WebVRDisplayData;
+pub use webvr::VRDisplayCapabilities as WebVRDisplayCapabilities;
+pub use webvr::VRDisplayEvent as WebVRDisplayEvent;
+pub use webvr::VRDisplayEventReason as WebVRDisplayEventReason;
+pub use webvr::VREye as WebVREye;
+pub use webvr::VREyeParameters as WebVREyeParameters;
+pub use webvr::VRFieldOfView as WebVRFieldOfView;
+pub use webvr::VRFrameData as WebVRFrameData;
+pub use webvr::VRLayer as WebVRLayer;
+pub use webvr::VRPose as WebVRPose;
+pub use webvr::VRStageParameters as WebVRStageParameters;
+pub use webvr_traits::{WebVRMsg, WebVRResult};
diff --git a/components/webvr_traits/webvr_traits.rs b/components/webvr_traits/webvr_traits.rs
new file mode 100644
index 00000000000..d52dd20b9f7
--- /dev/null
+++ b/components/webvr_traits/webvr_traits.rs
@@ -0,0 +1,24 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+use ipc_channel::ipc::IpcSender;
+use msg::constellation_msg::PipelineId;
+use webvr::*;
+
+pub type WebVRResult<T> = Result<T, String>;
+
+// Messages from Script thread to WebVR thread.
+#[derive(Deserialize, Serialize)]
+pub enum WebVRMsg {
+ RegisterContext(PipelineId),
+ UnregisterContext(PipelineId),
+ PollEvents(IpcSender<bool>),
+ GetDisplays(IpcSender<WebVRResult<Vec<VRDisplayData>>>),
+ GetFrameData(PipelineId, u64, f64, f64, IpcSender<WebVRResult<VRFrameData>>),
+ ResetPose(PipelineId, u64, IpcSender<WebVRResult<VRDisplayData>>),
+ RequestPresent(PipelineId, u64, IpcSender<WebVRResult<()>>),
+ ExitPresent(PipelineId, u64, Option<IpcSender<WebVRResult<()>>>),
+ CreateCompositor(u64),
+ Exit,
+}
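+
+// A minimal usage sketch (illustrative only, not part of this crate): given an
+// IpcSender<WebVRMsg> handle to the WebVR thread (called `webvr_chan` here, a hypothetical name),
+// a caller can request the connected displays and block on the reply:
+//
+//     let (sender, receiver) = ipc_channel::ipc::channel().unwrap();
+//     webvr_chan.send(WebVRMsg::GetDisplays(sender)).unwrap();
+//     match receiver.recv().unwrap() {
+//         Ok(displays) => println!("Found {} VRDisplay(s)", displays.len()),
+//         Err(msg) => println!("WebVR error: {}", msg),
+//     }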
diff --git a/ports/geckolib/glue.rs b/ports/geckolib/glue.rs
index aeb1fe6484e..68a7b0f7c82 100644
--- a/ports/geckolib/glue.rs
+++ b/ports/geckolib/glue.rs
@@ -521,10 +521,10 @@ pub extern "C" fn Servo_ComputedValues_GetForAnonymousBox(parent_style_or_null:
let maybe_parent = ComputedValues::arc_from_borrowed(&parent_style_or_null);
- let new_computed = data.stylist.precomputed_values_for_pseudo(&pseudo, maybe_parent,
- &data.default_computed_values, false)
- .map(|styles| styles.values);
- new_computed.map_or(Strong::null(), |c| c.into_strong())
+ data.stylist.precomputed_values_for_pseudo(&pseudo, maybe_parent,
+ &data.default_computed_values, false)
+ .values
+ .into_strong()
}
#[no_mangle]
diff --git a/resources/prefs.json b/resources/prefs.json
index f2c45fb35e9..67fea25b136 100644
--- a/resources/prefs.json
+++ b/resources/prefs.json
@@ -7,6 +7,8 @@
"dom.serviceworker.timeout_seconds": 60,
"dom.testable_crash.enabled": false,
"dom.testbinding.enabled": false,
+ "dom.webvr.enabled": false,
+ "dom.webvr.event_polling_interval": 500,
"js.asmjs.enabled": true,
"js.asyncstack.enabled": false,
"js.baseline.enabled": true,
diff --git a/tests/html/webvr/advanced-mirroring.html b/tests/html/webvr/advanced-mirroring.html
new file mode 100644
index 00000000000..d29b94b66dd
--- /dev/null
+++ b/tests/html/webvr/advanced-mirroring.html
@@ -0,0 +1,320 @@
+<!doctype html>
+<!--
+Copyright 2016 The Chromium Authors. All rights reserved.
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+-->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
+ <meta name="mobile-web-app-capable" content="yes">
+ <meta name="apple-mobile-web-app-capable" content="yes">
+
+ <title>07 - Advanced Mirroring</title>
+
+ <!--
+ This sample demonstrates how to display a different view of the scene on
+ an external monitor than is being viewed through the headset.
+ -->
+
+ <style>
+ #webgl-canvas {
+ box-sizing: border-box;
+ height: 100%;
+ left: 0;
+ margin: 0;
+ position: absolute;
+ top: 0;
+ width: 100%;
+ }
+ </style>
+
+ <!-- This entire block is only here to facilitate dynamically enabling and
+ disabling the WebVR polyfill, and is not necessary for most WebVR apps.
+ If you want to use the polyfill in your app, just include the js file and
+ everything will work the way you want it to by default. -->
+ <script>
+ var WebVRConfig = {
+ // Prevents the polyfill from initializing automatically.
+ DEFER_INITIALIZATION: true,
+ // Polyfill optimizations
+ DIRTY_SUBMIT_FRAME_BINDINGS: true,
+ BUFFER_SCALE: 0.75,
+ };
+ </script>
+ <script src="js/third-party/webvr-polyfill.js"></script>
+ <script src="js/third-party/wglu/wglu-url.js"></script>
+ <script>
+ // Dynamically turn the polyfill on if requested by the query args.
+ if (WGLUUrl.getBool('polyfill', false)) {
+ InitializeWebVRPolyfill();
+ } else {
+ // Shim for migration from an older version of the WebVR API. Shouldn't be necessary for very long.
+ InitializeSpecShim();
+ }
+ </script>
+ <!-- End sample polyfill enabling logic -->
+
+ <script src="js/third-party/gl-matrix-min.js"></script>
+
+ <script src="js/third-party/wglu/wglu-debug-geometry.js"></script>
+ <script src="js/third-party/wglu/wglu-program.js"></script>
+ <script src="js/third-party/wglu/wglu-stats.js"></script>
+ <script src="js/third-party/wglu/wglu-texture.js"></script>
+
+ <script src="js/vr-cube-island.js"></script>
+ <script src="js/vr-samples-util.js"></script>
+ </head>
+ <body>
+ <canvas id="webgl-canvas"></canvas>
+ <script>
+ /* global mat4, vec3, VRCubeIsland, WGLUDebugGeometry, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
+ (function () {
+ "use strict";
+
+ var PLAYER_HEIGHT = 1.65;
+
+ var vrDisplay = null;
+ var projectionMat = mat4.create();
+ var viewMat = mat4.create();
+ var poseMat = mat4.create();
+ var tmpMat = mat4.create();
+ var vrPresentButton = null;
+ var orientation = [0, 0, 0, 1];
+ var position = [0, 0, 0];
+
+ // ===================================================
+ // WebGL scene setup. This code is not WebVR specific.
+ // ===================================================
+
+ // WebGL setup.
+ var webglCanvas = document.getElementById("webgl-canvas");
+ var gl = null;
+ var cubeIsland = null;
+ var stats = null;
+ var debugGeom = null;
+
+ function initWebGL () {
+ var glAttribs = {
+ alpha: false,
+ antialias: false //!VRSamplesUtil.isMobile()
+ // When doing mirroring like this, do NOT turn on preserveDrawingBuffer!
+ };
+ gl = webglCanvas.getContext("webgl", glAttribs);
+ if (!gl) {
+ gl = webglCanvas.getContext("experimental-webgl", glAttribs);
+ if (!gl) {
+ VRSamplesUtil.addError("Your browser does not support WebGL.");
+ return;
+ }
+ }
+ gl.clearColor(0.1, 0.2, 0.3, 1.0);
+ gl.enable(gl.DEPTH_TEST);
+ gl.enable(gl.CULL_FACE);
+
+ var textureLoader = new WGLUTextureLoader(gl);
+ var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
+
+ // Using cubeIsland for this sample because it's easier to see from a
+ // third person view.
+ cubeIsland = new VRCubeIsland(gl, texture, 2, 2);
+
+ stats = new WGLUStats(gl);
+ debugGeom = new WGLUDebugGeometry(gl);
+
+ // Wait until we have a WebGL context to resize and start rendering.
+ window.addEventListener("resize", onResize, false);
+ onResize();
+ window.requestAnimationFrame(onAnimationFrame);
+ }
+
+ // ================================
+ // WebVR-specific code begins here.
+ // ================================
+
+ function onVRRequestPresent () {
+ vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
+ }, function () {
+ VRSamplesUtil.addError("requestPresent failed.", 2000);
+ });
+ }
+
+ function onVRExitPresent () {
+ if (!vrDisplay.isPresenting)
+ return;
+
+ vrDisplay.exitPresent().then(function () {
+ }, function () {
+ VRSamplesUtil.addError("exitPresent failed.", 2000);
+ });
+ }
+
+ function onVRPresentChange () {
+ onResize();
+
+ if (vrDisplay.isPresenting) {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
+ }
+ } else {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+ }
+ }
+ }
+
+ var frameData;
+
+ if (navigator.vr) {
+ frameData = new VRFrameData();
+
+ navigator.vr.getDisplays().then(function (displays) {
+ if (displays.length > 0) {
+ vrDisplay = displays[0];
+ vrDisplay.depthNear = 0.1;
+ vrDisplay.depthFar = 1024.0;
+
+ initWebGL();
+
+ if (vrDisplay.stageParameters &&
+ vrDisplay.stageParameters.sizeX > 0 &&
+ vrDisplay.stageParameters.sizeZ > 0) {
+ cubeIsland.resize(vrDisplay.stageParameters.sizeX, vrDisplay.stageParameters.sizeZ);
+ }
+
+ VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
+
+ if (vrDisplay.capabilities.canPresent)
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+
+ vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
+ //vrDisplay.addEventListener('activate', onVRRequestPresent, false);
+ //vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
+ } else {
+ initWebGL();
+ VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
+ }
+ });
+ } else if (navigator.getVRDevices) {
+ initWebGL();
+ VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
+ } else {
+ initWebGL();
+ VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
+ }
+
+ function onResize () {
+ if (vrDisplay && vrDisplay.isPresenting) {
+ var leftEye = vrDisplay.getEyeParameters("left");
+ var rightEye = vrDisplay.getEyeParameters("right");
+
+ webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
+ webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
+ } else {
+ webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
+ webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
+ }
+ }
+
+ function getStandingViewMatrix (out, view) {
+ if (vrDisplay.stageParameters) {
+ mat4.invert(out, vrDisplay.stageParameters.sittingToStandingTransform);
+ mat4.multiply(out, view, out);
+ } else {
+ mat4.identity(out);
+ mat4.translate(out, out, [0, PLAYER_HEIGHT, 0]);
+ mat4.invert(out, out);
+ mat4.multiply(out, view, out);
+ }
+ }
+
+ function getPoseMatrix (out, pose) {
+ orientation = pose.orientation;
+ position = pose.position;
+ if (!orientation) { orientation = [0, 0, 0, 1]; }
+ if (!position) { position = [0, 0, 0]; }
+
+ mat4.fromRotationTranslation(tmpMat, orientation, position);
+ mat4.invert(tmpMat, tmpMat);
+ getStandingViewMatrix(out, tmpMat);
+ mat4.invert(out, out);
+ }
+
+ function renderSceneThirdPersonView (pose) {
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+
+ // Set up the camera in the back left corner of the island
+ mat4.identity(viewMat);
+ mat4.translate(viewMat, viewMat, [-2, 2.5, 2]);
+ mat4.rotateY(viewMat, viewMat, Math.PI * -0.25);
+ mat4.rotateX(viewMat, viewMat, Math.PI * -0.15);
+ mat4.invert(viewMat, viewMat);
+ cubeIsland.render(projectionMat, viewMat, stats);
+
+ // Render a debug view of the headset's position
+ if (pose) {
+ getPoseMatrix(poseMat, pose);
+ mat4.getTranslation(position, poseMat);
+ mat4.getRotation(orientation, poseMat);
+
+ debugGeom.bind(projectionMat, viewMat);
+ debugGeom.drawCube(orientation, position, 0.2, [0, 1, 0, 1]);
+ }
+
+ stats.renderOrtho();
+ }
+
+ function onAnimationFrame (t) {
+ stats.begin();
+
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+
+ if (vrDisplay) {
+ vrDisplay.requestAnimationFrame(onAnimationFrame);
+
+ vrDisplay.getFrameData(frameData);
+
+ if(vrDisplay.isPresenting) {
+ gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
+ cubeIsland.render(frameData.leftProjectionMatrix, viewMat, stats);
+
+ gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ getStandingViewMatrix(viewMat, frameData.rightViewMatrix);
+ cubeIsland.render(frameData.rightProjectionMatrix, viewMat, stats);
+
+ // VRDisplay.submitFrame
+ vrDisplay.submitFrame();
+
+ // If we have an external display, we can render an entirely different
+ // version of the scene after calling submitFrame, and it will be shown
+ // on the page. Depending on the content this can be expensive, so this
+ // technique should only be used when it will not interfere with the
+ // performance of the VR rendering.
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ renderSceneThirdPersonView(frameData.pose);
+ }
+ } else {
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
+ cubeIsland.render(projectionMat, viewMat, stats);
+ }
+ } else {
+ window.requestAnimationFrame(onAnimationFrame);
+
+ // No VRDisplay found.
+ renderSceneThirdPersonView(null);
+ }
+
+ stats.end();
+ }
+ })();
+ </script>
+ </body>
+</html>
diff --git a/tests/html/webvr/dynamic-resolution.html b/tests/html/webvr/dynamic-resolution.html
new file mode 100644
index 00000000000..3e071200d7c
--- /dev/null
+++ b/tests/html/webvr/dynamic-resolution.html
@@ -0,0 +1,312 @@
+<!doctype html>
+<!--
+Copyright 2016 The Chromium Authors. All rights reserved.
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+-->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
+ <meta name="mobile-web-app-capable" content="yes">
+ <meta name="apple-mobile-web-app-capable" content="yes">
+
+ <title>08 - Dynamic Resolution</title>
+
+ <!--
+ This sample demonstrates how to efficiently adjust the resolution of your
+ WebVR scene on the fly using the layer bounds. Based on sample 4b.
+ -->
+
+ <style>
+ body {
+ background-color: black;
+ }
+
+ #canvas-clip, #webgl-canvas {
+ box-sizing: border-box;
+ height: 100%;
+ left: 0;
+ margin: 0;
+ position: absolute;
+ top: 0;
+ width: 100%;
+ }
+
+ #canvas-clip.presenting {
+ overflow: hidden;
+ bottom: 0;
+ right: 0;
+ margin: auto;
+ }
+ </style>
+
+ <!-- This entire block is only here to facilitate dynamically enabling and
+ disabling the WebVR polyfill, and is not necessary for most WebVR apps.
+ If you want to use the polyfill in your app, just include the js file and
+ everything will work the way you want it to by default. -->
+ <script>
+ var WebVRConfig = {
+ // Prevents the polyfill from initializing automatically.
+ DEFER_INITIALIZATION: true,
+ // Polyfill optimizations
+ DIRTY_SUBMIT_FRAME_BINDINGS: true,
+ BUFFER_SCALE: 0.75,
+ };
+ </script>
+ <script src="js/third-party/webvr-polyfill.js"></script>
+ <script src="js/third-party/wglu/wglu-url.js"></script>
+ <script>
+ // Dynamically turn the polyfill on if requested by the query args.
+ if (WGLUUrl.getBool('polyfill', false)) {
+ InitializeWebVRPolyfill();
+ } else {
+ // Shim for migration from an older version of the WebVR API. Shouldn't be necessary for very long.
+ InitializeSpecShim();
+ }
+ </script>
+ <!-- End sample polyfill enabling logic -->
+
+ <script src="js/third-party/gl-matrix-min.js"></script>
+
+ <script src="js/third-party/wglu/wglu-program.js"></script>
+ <script src="js/third-party/wglu/wglu-stats.js"></script>
+ <script src="js/third-party/wglu/wglu-texture.js"></script>
+
+ <script src="js/vr-cube-sea.js"></script>
+ <script src="js/vr-samples-util.js"></script>
+ </head>
+ <body>
+ <div id="canvas-clip">
+ <canvas id="webgl-canvas"></canvas>
+ </div>
+ <script>
+ /* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
+ (function () {
+ "use strict";
+
+ var vrDisplay = null;
+ var frameData = null;
+ var projectionMat = mat4.create();
+ var viewMat = mat4.create();
+ var vrPresentButton = null;
+
+ // ================================
+ // WebVR-specific code begins here.
+ // ================================
+
+ // WebGL setup.
+ var webglCanvas = document.getElementById("webgl-canvas");
+ var canvasClip = document.getElementById("canvas-clip");
+ var gl = null;
+ var cubeSea = null;
+ var stats = null;
+
+ function initWebGL (preserveDrawingBuffer) {
+ var glAttribs = {
+ alpha: false,
+ antialias: false, //!VRSamplesUtil.isMobile(),
+ preserveDrawingBuffer: false //preserveDrawingBuffer
+ };
+ gl = webglCanvas.getContext("webgl", glAttribs);
+ if (!gl) {
+ gl = webglCanvas.getContext("experimental-webgl", glAttribs);
+ if (!gl) {
+ VRSamplesUtil.addError("Your browser does not support WebGL.");
+ return;
+ }
+ }
+ gl.clearColor(0.1, 0.2, 0.3, 1.0);
+ gl.enable(gl.DEPTH_TEST);
+ gl.enable(gl.CULL_FACE);
+
+ var textureLoader = new WGLUTextureLoader(gl);
+ var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
+ cubeSea = new VRCubeSea(gl, texture);
+ stats = new WGLUStats(gl);
+
+ window.addEventListener("resize", onResize, false);
+ onResize();
+ window.requestAnimationFrame(onAnimationFrame);
+ }
+
+ function onVRRequestPresent () {
+ vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
+ }, function () {
+ VRSamplesUtil.addError("requestPresent failed.", 2000);
+ });
+ }
+
+ function onVRExitPresent () {
+ if (!vrDisplay.isPresenting)
+ return;
+ resolutionMultiplier = 1.0;
+ vrDisplay.exitPresent().then(function () {
+ }, function () {
+ VRSamplesUtil.addError("exitPresent failed.", 2000);
+ });
+ }
+
+ function onVRPresentChange () {
+ if (vrDisplay.isPresenting) {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
+ canvasClip.classList.add("presenting");
+
+ var leftEye = vrDisplay.getEyeParameters("left");
+ canvasClip.style.width = (leftEye.renderWidth/2) + "px";
+ canvasClip.style.height = (leftEye.renderHeight/2) + "px";
+ }
+ } else {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+
+ canvasClip.classList.remove("presenting");
+ canvasClip.style.width = "";
+ canvasClip.style.height = "";
+ webglCanvas.style.width = "";
+ webglCanvas.style.height = "";
+ }
+ }
+
+ // Make sure the canvas is resized AFTER we've updated the container div.
+ onResize();
+ }
+
+ if (navigator.vr) {
+ frameData = new VRFrameData();
+
+ navigator.vr.getDisplays().then(function (displays) {
+ if (displays.length > 0) {
+ vrDisplay = displays[0];
+ VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
+
+ if (vrDisplay.capabilities.canPresent)
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+
+ vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
+ //vrDisplay.addEventListener('activate', onVRRequestPresent, false);
+ //vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
+
+ initWebGL(vrDisplay.capabilities.hasExternalDisplay);
+ } else {
+ initWebGL(false);
+ VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
+ }
+ });
+ } else if (navigator.getVRDevices) {
+ initWebGL(false);
+ VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
+ } else {
+ initWebGL(false);
+ VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
+ }
+
+ function onResize () {
+ if (vrDisplay && vrDisplay.isPresenting) {
+ var leftEye = vrDisplay.getEyeParameters("left");
+ var rightEye = vrDisplay.getEyeParameters("right");
+
+ webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
+ webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
+ } else {
+ webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
+ webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
+ }
+ }
+
+ // How large our frame should be in relation to the recommended render
+ // target size.
+ var resolutionMultiplier = 1.0;
+ var eyeWidth, eyeHeight;
+ var lastAdjustment = 0;
+
+ function adjustResolution(t) {
+ // Throttle resolution updates to at most once every 100 milliseconds.
+ if (t - lastAdjustment < 100)
+ return;
+ lastAdjustment = t;
+
+ // Modify the resolution we are rendering at over time on a sin wave.
+ // In the real world this would probably be based on scene complexity.
+ // Oscillates between 0.5 and 1.0.
+ resolutionMultiplier = (Math.sin(t / 1000) * 0.25) + 0.75;
+
+ eyeWidth = webglCanvas.width * 0.5 * resolutionMultiplier;
+ eyeHeight = webglCanvas.height * resolutionMultiplier;
+
+ // Layer bounds are described in UV space, so 0.0 to 1.0
+ var boundsWidth = 0.5 * resolutionMultiplier;
+ var boundsHeight = resolutionMultiplier;
+
+ // Tell the presenting display about the new texture bounds. This
+ // ensures it only picks up the parts of the texture we're going to be
+ // rendering to and avoids the need to resize the WebGL canvas, which
+ // can be a slow operation. Because we're already presenting, calling
+ // requestPresent again only updates the VRLayer information and
+ // doesn't require a user gesture.
+ vrDisplay.requestPresent([{
+ source: webglCanvas,
+ leftBounds: [0.0, 0.0, boundsWidth, boundsHeight],
+ rightBounds: [boundsWidth, 0.0, boundsWidth, boundsHeight],
+ }]);
+
+ // To ensure the mirrored content also shows up correctly, scale the
+ // canvas display size so that it continues to show only one eye.
+ webglCanvas.style.width = (1.0/resolutionMultiplier) * 200 + "%";
+ webglCanvas.style.height = (1.0/resolutionMultiplier) * 100 + "%";
+ //webglCanvas.style.marginTop = ((eyeHeight - webglCanvas.height)* resolutionMultiplier) + "px";
+ }
+
+ function onAnimationFrame (t) {
+ stats.begin();
+
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+
+ if (vrDisplay) {
+ vrDisplay.requestAnimationFrame(onAnimationFrame);
+
+ vrDisplay.getFrameData(frameData);
+
+ if (vrDisplay.isPresenting) {
+ adjustResolution(t);
+
+ // Note that the viewports use the eyeWidth/height rather than the
+ // canvas width and height.
+ gl.viewport(0, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
+ cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
+
+ gl.viewport(eyeWidth, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
+ cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
+
+ vrDisplay.submitFrame();
+ } else {
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
+ stats.renderOrtho();
+ }
+ } else {
+ window.requestAnimationFrame(onAnimationFrame);
+
+ // No VRDisplay found.
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ mat4.identity(viewMat);
+ cubeSea.render(projectionMat, viewMat, stats);
+
+ stats.renderOrtho();
+ }
+
+ stats.end();
+ }
+ })();
+ </script>
+ </body>
+</html>
diff --git a/tests/html/webvr/js/third-party/gl-matrix-min.js b/tests/html/webvr/js/third-party/gl-matrix-min.js
new file mode 100644
index 00000000000..697bb5ceb32
--- /dev/null
+++ b/tests/html/webvr/js/third-party/gl-matrix-min.js
@@ -0,0 +1,29 @@
+/**
+ * @fileoverview gl-matrix - High performance matrix and vector operations
+ * @author Brandon Jones
+ * @author Colin MacKenzie IV
+ * @version 2.3.2
+ */
+
+/* Copyright (c) 2015, Brandon Jones, Colin MacKenzie IV.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE. */
+
+!function(t,a){if("object"==typeof exports&&"object"==typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var n=a();for(var r in n)("object"==typeof exports?exports:t)[r]=n[r]}}(this,function(){return function(t){function a(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,a),o.loaded=!0,o.exports}var n={};return a.m=t,a.c=n,a.p="",a(0)}([function(t,a,n){a.glMatrix=n(1),a.mat2=n(2),a.mat2d=n(3),a.mat3=n(4),a.mat4=n(5),a.quat=n(6),a.vec2=n(9),a.vec3=n(7),a.vec4=n(8)},function(t,a){var n={};n.EPSILON=1e-6,n.ARRAY_TYPE="undefined"!=typeof Float32Array?Float32Array:Array,n.RANDOM=Math.random,n.ENABLE_SIMD=!1,n.SIMD_AVAILABLE=n.ARRAY_TYPE===Float32Array&&"SIMD"in this,n.USE_SIMD=n.ENABLE_SIMD&&n.SIMD_AVAILABLE,n.setMatrixArrayType=function(t){n.ARRAY_TYPE=t};var r=Math.PI/180;n.toRadian=function(t){return t*r},n.equals=function(t,a){return Math.abs(t-a)<=n.EPSILON*Math.max(1,Math.abs(t),Math.abs(a))},t.exports=n},function(t,a,n){var r=n(1),o={};o.create=function(){var t=new r.ARRAY_TYPE(4);return t[0]=1,t[1]=0,t[2]=0,t[3]=1,t},o.clone=function(t){var a=new r.ARRAY_TYPE(4);return a[0]=t[0],a[1]=t[1],a[2]=t[2],a[3]=t[3],a},o.copy=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t},o.identity=function(t){return t[0]=1,t[1]=0,t[2]=0,t[3]=1,t},o.fromValues=function(t,a,n,o){var u=new r.ARRAY_TYPE(4);return u[0]=t,u[1]=a,u[2]=n,u[3]=o,u},o.set=function(t,a,n,r,o){return t[0]=a,t[1]=n,t[2]=r,t[3]=o,t},o.transpose=function(t,a){if(t===a){var n=a[1];t[1]=a[2],t[2]=n}else t[0]=a[0],t[1]=a[2],t[2]=a[1],t[3]=a[3];return t},o.invert=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=n*u-o*r;return l?(l=1/l,t[0]=u*l,t[1]=-r*l,t[2]=-o*l,t[3]=n*l,t):null},o.adjoint=function(t,a){var n=a[0];return t[0]=a[3],t[1]=-a[1],t[2]=-a[2],t[3]=n,t},o.determinant=function(t){return t[0]*t[3]-t[2]*t[1]},o.multiply=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=n[0],M=n[1],s=n[2],i=n[3];return t[0]=r*e+u*M,t[1]=o*e+l*M,t[2]=r*s+u*i,t[3]=o*s+l*i,t},o.mul=o.multiply,o.rotate=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=Math.sin(n),M=Math.cos(n);return t[0]=r*M+u*e,t[1]=o*M+l*e,t[2]=r*-e+u*M,t[3]=o*-e+l*M,t},o.scale=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=n[0],M=n[1];return t[0]=r*e,t[1]=o*e,t[2]=u*M,t[3]=l*M,t},o.fromRotation=function(t,a){var n=Math.sin(a),r=Math.cos(a);return t[0]=r,t[1]=n,t[2]=-n,t[3]=r,t},o.fromScaling=function(t,a){return t[0]=a[0],t[1]=0,t[2]=0,t[3]=a[1],t},o.str=function(t){return"mat2("+t[0]+", "+t[1]+", "+t[2]+", "+t[3]+")"},o.frob=function(t){return Math.sqrt(Math.pow(t[0],2)+Math.pow(t[1],2)+Math.pow(t[2],2)+Math.pow(t[3],2))},o.LDU=function(t,a,n,r){return t[2]=r[2]/r[0],n[0]=r[0],n[1]=r[1],n[3]=r[3]-t[2]*n[1],[t,a,n]},o.add=function(t,a,n){return t[0]=a[0]+n[0],t[1]=a[1]+n[1],t[2]=a[2]+n[2],t[3]=a[3]+n[3],t},o.subtract=function(t,a,n){return t[0]=a[0]-n[0],t[1]=a[1]-n[1],t[2]=a[2]-n[2],t[3]=a[3]-n[3],t},o.sub=o.subtract,o.exactEquals=function(t,a){return t[0]===a[0]&&t[1]===a[1]&&t[2]===a[2]&&t[3]===a[3]},o.equals=function(t,a){var n=t[0],o=t[1],u=t[2],l=t[3],e=a[0],M=a[1],s=a[2],i=a[3];return Math.abs(n-e)<=r.EPSILON*Math.max(1,Math.abs(n),Math.abs(e))&&Math.abs(o-M)<=r.EPSILON*Math.max(1,Math.abs(o),Math.abs(M))&&Math.abs(u-s)<=r.EPSILON*Math.max(1,Math.abs(u),Math.abs(s))&&Math.abs(l-i)<=r.EPSILON*Math.max(1,Math.abs(l),Math.abs(i))},o.multiplyScalar=function(t,a,n){return 
t[0]=a[0]*n,t[1]=a[1]*n,t[2]=a[2]*n,t[3]=a[3]*n,t},o.multiplyScalarAndAdd=function(t,a,n,r){return t[0]=a[0]+n[0]*r,t[1]=a[1]+n[1]*r,t[2]=a[2]+n[2]*r,t[3]=a[3]+n[3]*r,t},t.exports=o},function(t,a,n){var r=n(1),o={};o.create=function(){var t=new r.ARRAY_TYPE(6);return t[0]=1,t[1]=0,t[2]=0,t[3]=1,t[4]=0,t[5]=0,t},o.clone=function(t){var a=new r.ARRAY_TYPE(6);return a[0]=t[0],a[1]=t[1],a[2]=t[2],a[3]=t[3],a[4]=t[4],a[5]=t[5],a},o.copy=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t[4]=a[4],t[5]=a[5],t},o.identity=function(t){return t[0]=1,t[1]=0,t[2]=0,t[3]=1,t[4]=0,t[5]=0,t},o.fromValues=function(t,a,n,o,u,l){var e=new r.ARRAY_TYPE(6);return e[0]=t,e[1]=a,e[2]=n,e[3]=o,e[4]=u,e[5]=l,e},o.set=function(t,a,n,r,o,u,l){return t[0]=a,t[1]=n,t[2]=r,t[3]=o,t[4]=u,t[5]=l,t},o.invert=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=a[4],e=a[5],M=n*u-r*o;return M?(M=1/M,t[0]=u*M,t[1]=-r*M,t[2]=-o*M,t[3]=n*M,t[4]=(o*e-u*l)*M,t[5]=(r*l-n*e)*M,t):null},o.determinant=function(t){return t[0]*t[3]-t[1]*t[2]},o.multiply=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=n[0],i=n[1],c=n[2],h=n[3],S=n[4],I=n[5];return t[0]=r*s+u*i,t[1]=o*s+l*i,t[2]=r*c+u*h,t[3]=o*c+l*h,t[4]=r*S+u*I+e,t[5]=o*S+l*I+M,t},o.mul=o.multiply,o.rotate=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=Math.sin(n),i=Math.cos(n);return t[0]=r*i+u*s,t[1]=o*i+l*s,t[2]=r*-s+u*i,t[3]=o*-s+l*i,t[4]=e,t[5]=M,t},o.scale=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=n[0],i=n[1];return t[0]=r*s,t[1]=o*s,t[2]=u*i,t[3]=l*i,t[4]=e,t[5]=M,t},o.translate=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=n[0],i=n[1];return t[0]=r,t[1]=o,t[2]=u,t[3]=l,t[4]=r*s+u*i+e,t[5]=o*s+l*i+M,t},o.fromRotation=function(t,a){var n=Math.sin(a),r=Math.cos(a);return t[0]=r,t[1]=n,t[2]=-n,t[3]=r,t[4]=0,t[5]=0,t},o.fromScaling=function(t,a){return t[0]=a[0],t[1]=0,t[2]=0,t[3]=a[1],t[4]=0,t[5]=0,t},o.fromTranslation=function(t,a){return t[0]=1,t[1]=0,t[2]=0,t[3]=1,t[4]=a[0],t[5]=a[1],t},o.str=function(t){return"mat2d("+t[0]+", "+t[1]+", "+t[2]+", "+t[3]+", "+t[4]+", "+t[5]+")"},o.frob=function(t){return Math.sqrt(Math.pow(t[0],2)+Math.pow(t[1],2)+Math.pow(t[2],2)+Math.pow(t[3],2)+Math.pow(t[4],2)+Math.pow(t[5],2)+1)},o.add=function(t,a,n){return t[0]=a[0]+n[0],t[1]=a[1]+n[1],t[2]=a[2]+n[2],t[3]=a[3]+n[3],t[4]=a[4]+n[4],t[5]=a[5]+n[5],t},o.subtract=function(t,a,n){return t[0]=a[0]-n[0],t[1]=a[1]-n[1],t[2]=a[2]-n[2],t[3]=a[3]-n[3],t[4]=a[4]-n[4],t[5]=a[5]-n[5],t},o.sub=o.subtract,o.multiplyScalar=function(t,a,n){return t[0]=a[0]*n,t[1]=a[1]*n,t[2]=a[2]*n,t[3]=a[3]*n,t[4]=a[4]*n,t[5]=a[5]*n,t},o.multiplyScalarAndAdd=function(t,a,n,r){return t[0]=a[0]+n[0]*r,t[1]=a[1]+n[1]*r,t[2]=a[2]+n[2]*r,t[3]=a[3]+n[3]*r,t[4]=a[4]+n[4]*r,t[5]=a[5]+n[5]*r,t},o.exactEquals=function(t,a){return t[0]===a[0]&&t[1]===a[1]&&t[2]===a[2]&&t[3]===a[3]&&t[4]===a[4]&&t[5]===a[5]},o.equals=function(t,a){var n=t[0],o=t[1],u=t[2],l=t[3],e=t[4],M=t[5],s=a[0],i=a[1],c=a[2],h=a[3],S=a[4],I=a[5];return Math.abs(n-s)<=r.EPSILON*Math.max(1,Math.abs(n),Math.abs(s))&&Math.abs(o-i)<=r.EPSILON*Math.max(1,Math.abs(o),Math.abs(i))&&Math.abs(u-c)<=r.EPSILON*Math.max(1,Math.abs(u),Math.abs(c))&&Math.abs(l-h)<=r.EPSILON*Math.max(1,Math.abs(l),Math.abs(h))&&Math.abs(e-S)<=r.EPSILON*Math.max(1,Math.abs(e),Math.abs(S))&&Math.abs(M-I)<=r.EPSILON*Math.max(1,Math.abs(M),Math.abs(I))},t.exports=o},function(t,a,n){var r=n(1),o={};o.create=function(){var t=new r.ARRAY_TYPE(9);return 
t[0]=1,t[1]=0,t[2]=0,t[3]=0,t[4]=1,t[5]=0,t[6]=0,t[7]=0,t[8]=1,t},o.fromMat4=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[4],t[4]=a[5],t[5]=a[6],t[6]=a[8],t[7]=a[9],t[8]=a[10],t},o.clone=function(t){var a=new r.ARRAY_TYPE(9);return a[0]=t[0],a[1]=t[1],a[2]=t[2],a[3]=t[3],a[4]=t[4],a[5]=t[5],a[6]=t[6],a[7]=t[7],a[8]=t[8],a},o.copy=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t[4]=a[4],t[5]=a[5],t[6]=a[6],t[7]=a[7],t[8]=a[8],t},o.fromValues=function(t,a,n,o,u,l,e,M,s){var i=new r.ARRAY_TYPE(9);return i[0]=t,i[1]=a,i[2]=n,i[3]=o,i[4]=u,i[5]=l,i[6]=e,i[7]=M,i[8]=s,i},o.set=function(t,a,n,r,o,u,l,e,M,s){return t[0]=a,t[1]=n,t[2]=r,t[3]=o,t[4]=u,t[5]=l,t[6]=e,t[7]=M,t[8]=s,t},o.identity=function(t){return t[0]=1,t[1]=0,t[2]=0,t[3]=0,t[4]=1,t[5]=0,t[6]=0,t[7]=0,t[8]=1,t},o.transpose=function(t,a){if(t===a){var n=a[1],r=a[2],o=a[5];t[1]=a[3],t[2]=a[6],t[3]=n,t[5]=a[7],t[6]=r,t[7]=o}else t[0]=a[0],t[1]=a[3],t[2]=a[6],t[3]=a[1],t[4]=a[4],t[5]=a[7],t[6]=a[2],t[7]=a[5],t[8]=a[8];return t},o.invert=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=a[4],e=a[5],M=a[6],s=a[7],i=a[8],c=i*l-e*s,h=-i*u+e*M,S=s*u-l*M,I=n*c+r*h+o*S;return I?(I=1/I,t[0]=c*I,t[1]=(-i*r+o*s)*I,t[2]=(e*r-o*l)*I,t[3]=h*I,t[4]=(i*n-o*M)*I,t[5]=(-e*n+o*u)*I,t[6]=S*I,t[7]=(-s*n+r*M)*I,t[8]=(l*n-r*u)*I,t):null},o.adjoint=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=a[4],e=a[5],M=a[6],s=a[7],i=a[8];return t[0]=l*i-e*s,t[1]=o*s-r*i,t[2]=r*e-o*l,t[3]=e*M-u*i,t[4]=n*i-o*M,t[5]=o*u-n*e,t[6]=u*s-l*M,t[7]=r*M-n*s,t[8]=n*l-r*u,t},o.determinant=function(t){var a=t[0],n=t[1],r=t[2],o=t[3],u=t[4],l=t[5],e=t[6],M=t[7],s=t[8];return a*(s*u-l*M)+n*(-s*o+l*e)+r*(M*o-u*e)},o.multiply=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=a[6],i=a[7],c=a[8],h=n[0],S=n[1],I=n[2],f=n[3],x=n[4],D=n[5],F=n[6],m=n[7],d=n[8];return t[0]=h*r+S*l+I*s,t[1]=h*o+S*e+I*i,t[2]=h*u+S*M+I*c,t[3]=f*r+x*l+D*s,t[4]=f*o+x*e+D*i,t[5]=f*u+x*M+D*c,t[6]=F*r+m*l+d*s,t[7]=F*o+m*e+d*i,t[8]=F*u+m*M+d*c,t},o.mul=o.multiply,o.translate=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=a[6],i=a[7],c=a[8],h=n[0],S=n[1];return t[0]=r,t[1]=o,t[2]=u,t[3]=l,t[4]=e,t[5]=M,t[6]=h*r+S*l+s,t[7]=h*o+S*e+i,t[8]=h*u+S*M+c,t},o.rotate=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=a[6],i=a[7],c=a[8],h=Math.sin(n),S=Math.cos(n);return t[0]=S*r+h*l,t[1]=S*o+h*e,t[2]=S*u+h*M,t[3]=S*l-h*r,t[4]=S*e-h*o,t[5]=S*M-h*u,t[6]=s,t[7]=i,t[8]=c,t},o.scale=function(t,a,n){var r=n[0],o=n[1];return t[0]=r*a[0],t[1]=r*a[1],t[2]=r*a[2],t[3]=o*a[3],t[4]=o*a[4],t[5]=o*a[5],t[6]=a[6],t[7]=a[7],t[8]=a[8],t},o.fromTranslation=function(t,a){return t[0]=1,t[1]=0,t[2]=0,t[3]=0,t[4]=1,t[5]=0,t[6]=a[0],t[7]=a[1],t[8]=1,t},o.fromRotation=function(t,a){var n=Math.sin(a),r=Math.cos(a);return t[0]=r,t[1]=n,t[2]=0,t[3]=-n,t[4]=r,t[5]=0,t[6]=0,t[7]=0,t[8]=1,t},o.fromScaling=function(t,a){return t[0]=a[0],t[1]=0,t[2]=0,t[3]=0,t[4]=a[1],t[5]=0,t[6]=0,t[7]=0,t[8]=1,t},o.fromMat2d=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=0,t[3]=a[2],t[4]=a[3],t[5]=0,t[6]=a[4],t[7]=a[5],t[8]=1,t},o.fromQuat=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=n+n,e=r+r,M=o+o,s=n*l,i=r*l,c=r*e,h=o*l,S=o*e,I=o*M,f=u*l,x=u*e,D=u*M;return t[0]=1-c-I,t[3]=i-D,t[6]=h+x,t[1]=i+D,t[4]=1-s-I,t[7]=S-f,t[2]=h-x,t[5]=S+f,t[8]=1-s-c,t},o.normalFromMat4=function(t,a){var 
n=a[0],r=a[1],o=a[2],u=a[3],l=a[4],e=a[5],M=a[6],s=a[7],i=a[8],c=a[9],h=a[10],S=a[11],I=a[12],f=a[13],x=a[14],D=a[15],F=n*e-r*l,m=n*M-o*l,d=n*s-u*l,b=r*M-o*e,v=r*s-u*e,z=o*s-u*M,p=i*f-c*I,w=i*x-h*I,E=i*D-S*I,A=c*x-h*f,P=c*D-S*f,L=h*D-S*x,q=F*L-m*P+d*A+b*E-v*w+z*p;return q?(q=1/q,t[0]=(e*L-M*P+s*A)*q,t[1]=(M*E-l*L-s*w)*q,t[2]=(l*P-e*E+s*p)*q,t[3]=(o*P-r*L-u*A)*q,t[4]=(n*L-o*E+u*w)*q,t[5]=(r*E-n*P-u*p)*q,t[6]=(f*z-x*v+D*b)*q,t[7]=(x*d-I*z-D*m)*q,t[8]=(I*v-f*d+D*F)*q,t):null},o.str=function(t){return"mat3("+t[0]+", "+t[1]+", "+t[2]+", "+t[3]+", "+t[4]+", "+t[5]+", "+t[6]+", "+t[7]+", "+t[8]+")"},o.frob=function(t){return Math.sqrt(Math.pow(t[0],2)+Math.pow(t[1],2)+Math.pow(t[2],2)+Math.pow(t[3],2)+Math.pow(t[4],2)+Math.pow(t[5],2)+Math.pow(t[6],2)+Math.pow(t[7],2)+Math.pow(t[8],2))},o.add=function(t,a,n){return t[0]=a[0]+n[0],t[1]=a[1]+n[1],t[2]=a[2]+n[2],t[3]=a[3]+n[3],t[4]=a[4]+n[4],t[5]=a[5]+n[5],t[6]=a[6]+n[6],t[7]=a[7]+n[7],t[8]=a[8]+n[8],t},o.subtract=function(t,a,n){return t[0]=a[0]-n[0],t[1]=a[1]-n[1],t[2]=a[2]-n[2],t[3]=a[3]-n[3],t[4]=a[4]-n[4],t[5]=a[5]-n[5],t[6]=a[6]-n[6],t[7]=a[7]-n[7],t[8]=a[8]-n[8],t},o.sub=o.subtract,o.multiplyScalar=function(t,a,n){return t[0]=a[0]*n,t[1]=a[1]*n,t[2]=a[2]*n,t[3]=a[3]*n,t[4]=a[4]*n,t[5]=a[5]*n,t[6]=a[6]*n,t[7]=a[7]*n,t[8]=a[8]*n,t},o.multiplyScalarAndAdd=function(t,a,n,r){return t[0]=a[0]+n[0]*r,t[1]=a[1]+n[1]*r,t[2]=a[2]+n[2]*r,t[3]=a[3]+n[3]*r,t[4]=a[4]+n[4]*r,t[5]=a[5]+n[5]*r,t[6]=a[6]+n[6]*r,t[7]=a[7]+n[7]*r,t[8]=a[8]+n[8]*r,t},o.exactEquals=function(t,a){return t[0]===a[0]&&t[1]===a[1]&&t[2]===a[2]&&t[3]===a[3]&&t[4]===a[4]&&t[5]===a[5]&&t[6]===a[6]&&t[7]===a[7]&&t[8]===a[8]},o.equals=function(t,a){var n=t[0],o=t[1],u=t[2],l=t[3],e=t[4],M=t[5],s=t[6],i=t[7],c=t[8],h=a[0],S=a[1],I=a[2],f=a[3],x=a[4],D=a[5],F=t[6],m=a[7],d=a[8];return Math.abs(n-h)<=r.EPSILON*Math.max(1,Math.abs(n),Math.abs(h))&&Math.abs(o-S)<=r.EPSILON*Math.max(1,Math.abs(o),Math.abs(S))&&Math.abs(u-I)<=r.EPSILON*Math.max(1,Math.abs(u),Math.abs(I))&&Math.abs(l-f)<=r.EPSILON*Math.max(1,Math.abs(l),Math.abs(f))&&Math.abs(e-x)<=r.EPSILON*Math.max(1,Math.abs(e),Math.abs(x))&&Math.abs(M-D)<=r.EPSILON*Math.max(1,Math.abs(M),Math.abs(D))&&Math.abs(s-F)<=r.EPSILON*Math.max(1,Math.abs(s),Math.abs(F))&&Math.abs(i-m)<=r.EPSILON*Math.max(1,Math.abs(i),Math.abs(m))&&Math.abs(c-d)<=r.EPSILON*Math.max(1,Math.abs(c),Math.abs(d))},t.exports=o},function(t,a,n){var r=n(1),o={scalar:{},SIMD:{}};o.create=function(){var t=new r.ARRAY_TYPE(16);return t[0]=1,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=1,t[6]=0,t[7]=0,t[8]=0,t[9]=0,t[10]=1,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t},o.clone=function(t){var a=new r.ARRAY_TYPE(16);return a[0]=t[0],a[1]=t[1],a[2]=t[2],a[3]=t[3],a[4]=t[4],a[5]=t[5],a[6]=t[6],a[7]=t[7],a[8]=t[8],a[9]=t[9],a[10]=t[10],a[11]=t[11],a[12]=t[12],a[13]=t[13],a[14]=t[14],a[15]=t[15],a},o.copy=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t[4]=a[4],t[5]=a[5],t[6]=a[6],t[7]=a[7],t[8]=a[8],t[9]=a[9],t[10]=a[10],t[11]=a[11],t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15],t},o.fromValues=function(t,a,n,o,u,l,e,M,s,i,c,h,S,I,f,x){var D=new r.ARRAY_TYPE(16);return D[0]=t,D[1]=a,D[2]=n,D[3]=o,D[4]=u,D[5]=l,D[6]=e,D[7]=M,D[8]=s,D[9]=i,D[10]=c,D[11]=h,D[12]=S,D[13]=I,D[14]=f,D[15]=x,D},o.set=function(t,a,n,r,o,u,l,e,M,s,i,c,h,S,I,f,x){return t[0]=a,t[1]=n,t[2]=r,t[3]=o,t[4]=u,t[5]=l,t[6]=e,t[7]=M,t[8]=s,t[9]=i,t[10]=c,t[11]=h,t[12]=S,t[13]=I,t[14]=f,t[15]=x,t},o.identity=function(t){return 
t[0]=1,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=1,t[6]=0,t[7]=0,t[8]=0,t[9]=0,t[10]=1,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t},o.scalar.transpose=function(t,a){if(t===a){var n=a[1],r=a[2],o=a[3],u=a[6],l=a[7],e=a[11];t[1]=a[4],t[2]=a[8],t[3]=a[12],t[4]=n,t[6]=a[9],t[7]=a[13],t[8]=r,t[9]=u,t[11]=a[14],t[12]=o,t[13]=l,t[14]=e}else t[0]=a[0],t[1]=a[4],t[2]=a[8],t[3]=a[12],t[4]=a[1],t[5]=a[5],t[6]=a[9],t[7]=a[13],t[8]=a[2],t[9]=a[6],t[10]=a[10],t[11]=a[14],t[12]=a[3],t[13]=a[7],t[14]=a[11],t[15]=a[15];return t},o.SIMD.transpose=function(t,a){var n,r,o,u,l,e,M,s,i,c;return n=SIMD.Float32x4.load(a,0),r=SIMD.Float32x4.load(a,4),o=SIMD.Float32x4.load(a,8),u=SIMD.Float32x4.load(a,12),l=SIMD.Float32x4.shuffle(n,r,0,1,4,5),e=SIMD.Float32x4.shuffle(o,u,0,1,4,5),M=SIMD.Float32x4.shuffle(l,e,0,2,4,6),s=SIMD.Float32x4.shuffle(l,e,1,3,5,7),SIMD.Float32x4.store(t,0,M),SIMD.Float32x4.store(t,4,s),l=SIMD.Float32x4.shuffle(n,r,2,3,6,7),e=SIMD.Float32x4.shuffle(o,u,2,3,6,7),i=SIMD.Float32x4.shuffle(l,e,0,2,4,6),c=SIMD.Float32x4.shuffle(l,e,1,3,5,7),SIMD.Float32x4.store(t,8,i),SIMD.Float32x4.store(t,12,c),t},o.transpose=r.USE_SIMD?o.SIMD.transpose:o.scalar.transpose,o.scalar.invert=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=a[4],e=a[5],M=a[6],s=a[7],i=a[8],c=a[9],h=a[10],S=a[11],I=a[12],f=a[13],x=a[14],D=a[15],F=n*e-r*l,m=n*M-o*l,d=n*s-u*l,b=r*M-o*e,v=r*s-u*e,z=o*s-u*M,p=i*f-c*I,w=i*x-h*I,E=i*D-S*I,A=c*x-h*f,P=c*D-S*f,L=h*D-S*x,q=F*L-m*P+d*A+b*E-v*w+z*p;return q?(q=1/q,t[0]=(e*L-M*P+s*A)*q,t[1]=(o*P-r*L-u*A)*q,t[2]=(f*z-x*v+D*b)*q,t[3]=(h*v-c*z-S*b)*q,t[4]=(M*E-l*L-s*w)*q,t[5]=(n*L-o*E+u*w)*q,t[6]=(x*d-I*z-D*m)*q,t[7]=(i*z-h*d+S*m)*q,t[8]=(l*P-e*E+s*p)*q,t[9]=(r*E-n*P-u*p)*q,t[10]=(I*v-f*d+D*F)*q,t[11]=(c*d-i*v-S*F)*q,t[12]=(e*w-l*A-M*p)*q,t[13]=(n*A-r*w+o*p)*q,t[14]=(f*m-I*b-x*F)*q,t[15]=(i*b-c*m+h*F)*q,t):null},o.SIMD.invert=function(t,a){var n,r,o,u,l,e,M,s,i,c,h=SIMD.Float32x4.load(a,0),S=SIMD.Float32x4.load(a,4),I=SIMD.Float32x4.load(a,8),f=SIMD.Float32x4.load(a,12);return 
l=SIMD.Float32x4.shuffle(h,S,0,1,4,5),r=SIMD.Float32x4.shuffle(I,f,0,1,4,5),n=SIMD.Float32x4.shuffle(l,r,0,2,4,6),r=SIMD.Float32x4.shuffle(r,l,1,3,5,7),l=SIMD.Float32x4.shuffle(h,S,2,3,6,7),u=SIMD.Float32x4.shuffle(I,f,2,3,6,7),o=SIMD.Float32x4.shuffle(l,u,0,2,4,6),u=SIMD.Float32x4.shuffle(u,l,1,3,5,7),l=SIMD.Float32x4.mul(o,u),l=SIMD.Float32x4.swizzle(l,1,0,3,2),e=SIMD.Float32x4.mul(r,l),M=SIMD.Float32x4.mul(n,l),l=SIMD.Float32x4.swizzle(l,2,3,0,1),e=SIMD.Float32x4.sub(SIMD.Float32x4.mul(r,l),e),M=SIMD.Float32x4.sub(SIMD.Float32x4.mul(n,l),M),M=SIMD.Float32x4.swizzle(M,2,3,0,1),l=SIMD.Float32x4.mul(r,o),l=SIMD.Float32x4.swizzle(l,1,0,3,2),e=SIMD.Float32x4.add(SIMD.Float32x4.mul(u,l),e),i=SIMD.Float32x4.mul(n,l),l=SIMD.Float32x4.swizzle(l,2,3,0,1),e=SIMD.Float32x4.sub(e,SIMD.Float32x4.mul(u,l)),i=SIMD.Float32x4.sub(SIMD.Float32x4.mul(n,l),i),i=SIMD.Float32x4.swizzle(i,2,3,0,1),l=SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(r,2,3,0,1),u),l=SIMD.Float32x4.swizzle(l,1,0,3,2),o=SIMD.Float32x4.swizzle(o,2,3,0,1),e=SIMD.Float32x4.add(SIMD.Float32x4.mul(o,l),e),s=SIMD.Float32x4.mul(n,l),l=SIMD.Float32x4.swizzle(l,2,3,0,1),e=SIMD.Float32x4.sub(e,SIMD.Float32x4.mul(o,l)),s=SIMD.Float32x4.sub(SIMD.Float32x4.mul(n,l),s),s=SIMD.Float32x4.swizzle(s,2,3,0,1),l=SIMD.Float32x4.mul(n,r),l=SIMD.Float32x4.swizzle(l,1,0,3,2),s=SIMD.Float32x4.add(SIMD.Float32x4.mul(u,l),s),i=SIMD.Float32x4.sub(SIMD.Float32x4.mul(o,l),i),l=SIMD.Float32x4.swizzle(l,2,3,0,1),s=SIMD.Float32x4.sub(SIMD.Float32x4.mul(u,l),s),i=SIMD.Float32x4.sub(i,SIMD.Float32x4.mul(o,l)),l=SIMD.Float32x4.mul(n,u),l=SIMD.Float32x4.swizzle(l,1,0,3,2),M=SIMD.Float32x4.sub(M,SIMD.Float32x4.mul(o,l)),s=SIMD.Float32x4.add(SIMD.Float32x4.mul(r,l),s),l=SIMD.Float32x4.swizzle(l,2,3,0,1),M=SIMD.Float32x4.add(SIMD.Float32x4.mul(o,l),M),s=SIMD.Float32x4.sub(s,SIMD.Float32x4.mul(r,l)),l=SIMD.Float32x4.mul(n,o),l=SIMD.Float32x4.swizzle(l,1,0,3,2),M=SIMD.Float32x4.add(SIMD.Float32x4.mul(u,l),M),i=SIMD.Float32x4.sub(i,SIMD.Float32x4.mul(r,l)),l=SIMD.Float32x4.swizzle(l,2,3,0,1),M=SIMD.Float32x4.sub(M,SIMD.Float32x4.mul(u,l)),i=SIMD.Float32x4.add(SIMD.Float32x4.mul(r,l),i),c=SIMD.Float32x4.mul(n,e),c=SIMD.Float32x4.add(SIMD.Float32x4.swizzle(c,2,3,0,1),c),c=SIMD.Float32x4.add(SIMD.Float32x4.swizzle(c,1,0,3,2),c),l=SIMD.Float32x4.reciprocalApproximation(c),c=SIMD.Float32x4.sub(SIMD.Float32x4.add(l,l),SIMD.Float32x4.mul(c,SIMD.Float32x4.mul(l,l))),(c=SIMD.Float32x4.swizzle(c,0,0,0,0))?(SIMD.Float32x4.store(t,0,SIMD.Float32x4.mul(c,e)),SIMD.Float32x4.store(t,4,SIMD.Float32x4.mul(c,M)),SIMD.Float32x4.store(t,8,SIMD.Float32x4.mul(c,s)),SIMD.Float32x4.store(t,12,SIMD.Float32x4.mul(c,i)),t):null},o.invert=r.USE_SIMD?o.SIMD.invert:o.scalar.invert,o.scalar.adjoint=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=a[4],e=a[5],M=a[6],s=a[7],i=a[8],c=a[9],h=a[10],S=a[11],I=a[12],f=a[13],x=a[14],D=a[15];return 
t[0]=e*(h*D-S*x)-c*(M*D-s*x)+f*(M*S-s*h),t[1]=-(r*(h*D-S*x)-c*(o*D-u*x)+f*(o*S-u*h)),t[2]=r*(M*D-s*x)-e*(o*D-u*x)+f*(o*s-u*M),t[3]=-(r*(M*S-s*h)-e*(o*S-u*h)+c*(o*s-u*M)),t[4]=-(l*(h*D-S*x)-i*(M*D-s*x)+I*(M*S-s*h)),t[5]=n*(h*D-S*x)-i*(o*D-u*x)+I*(o*S-u*h),t[6]=-(n*(M*D-s*x)-l*(o*D-u*x)+I*(o*s-u*M)),t[7]=n*(M*S-s*h)-l*(o*S-u*h)+i*(o*s-u*M),t[8]=l*(c*D-S*f)-i*(e*D-s*f)+I*(e*S-s*c),t[9]=-(n*(c*D-S*f)-i*(r*D-u*f)+I*(r*S-u*c)),t[10]=n*(e*D-s*f)-l*(r*D-u*f)+I*(r*s-u*e),t[11]=-(n*(e*S-s*c)-l*(r*S-u*c)+i*(r*s-u*e)),t[12]=-(l*(c*x-h*f)-i*(e*x-M*f)+I*(e*h-M*c)),t[13]=n*(c*x-h*f)-i*(r*x-o*f)+I*(r*h-o*c),t[14]=-(n*(e*x-M*f)-l*(r*x-o*f)+I*(r*M-o*e)),t[15]=n*(e*h-M*c)-l*(r*h-o*c)+i*(r*M-o*e),t},o.SIMD.adjoint=function(t,a){var n,r,o,u,l,e,M,s,i,c,h,S,I,n=SIMD.Float32x4.load(a,0),r=SIMD.Float32x4.load(a,4),o=SIMD.Float32x4.load(a,8),u=SIMD.Float32x4.load(a,12);return i=SIMD.Float32x4.shuffle(n,r,0,1,4,5),e=SIMD.Float32x4.shuffle(o,u,0,1,4,5),l=SIMD.Float32x4.shuffle(i,e,0,2,4,6),e=SIMD.Float32x4.shuffle(e,i,1,3,5,7),i=SIMD.Float32x4.shuffle(n,r,2,3,6,7),s=SIMD.Float32x4.shuffle(o,u,2,3,6,7),M=SIMD.Float32x4.shuffle(i,s,0,2,4,6),s=SIMD.Float32x4.shuffle(s,i,1,3,5,7),i=SIMD.Float32x4.mul(M,s),i=SIMD.Float32x4.swizzle(i,1,0,3,2),c=SIMD.Float32x4.mul(e,i),h=SIMD.Float32x4.mul(l,i),i=SIMD.Float32x4.swizzle(i,2,3,0,1),c=SIMD.Float32x4.sub(SIMD.Float32x4.mul(e,i),c),h=SIMD.Float32x4.sub(SIMD.Float32x4.mul(l,i),h),h=SIMD.Float32x4.swizzle(h,2,3,0,1),i=SIMD.Float32x4.mul(e,M),i=SIMD.Float32x4.swizzle(i,1,0,3,2),c=SIMD.Float32x4.add(SIMD.Float32x4.mul(s,i),c),I=SIMD.Float32x4.mul(l,i),i=SIMD.Float32x4.swizzle(i,2,3,0,1),c=SIMD.Float32x4.sub(c,SIMD.Float32x4.mul(s,i)),I=SIMD.Float32x4.sub(SIMD.Float32x4.mul(l,i),I),I=SIMD.Float32x4.swizzle(I,2,3,0,1),i=SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(e,2,3,0,1),s),i=SIMD.Float32x4.swizzle(i,1,0,3,2),M=SIMD.Float32x4.swizzle(M,2,3,0,1),c=SIMD.Float32x4.add(SIMD.Float32x4.mul(M,i),c),S=SIMD.Float32x4.mul(l,i),i=SIMD.Float32x4.swizzle(i,2,3,0,1),c=SIMD.Float32x4.sub(c,SIMD.Float32x4.mul(M,i)),S=SIMD.Float32x4.sub(SIMD.Float32x4.mul(l,i),S),S=SIMD.Float32x4.swizzle(S,2,3,0,1),i=SIMD.Float32x4.mul(l,e),i=SIMD.Float32x4.swizzle(i,1,0,3,2),S=SIMD.Float32x4.add(SIMD.Float32x4.mul(s,i),S),I=SIMD.Float32x4.sub(SIMD.Float32x4.mul(M,i),I),i=SIMD.Float32x4.swizzle(i,2,3,0,1),S=SIMD.Float32x4.sub(SIMD.Float32x4.mul(s,i),S),I=SIMD.Float32x4.sub(I,SIMD.Float32x4.mul(M,i)),i=SIMD.Float32x4.mul(l,s),i=SIMD.Float32x4.swizzle(i,1,0,3,2),h=SIMD.Float32x4.sub(h,SIMD.Float32x4.mul(M,i)),S=SIMD.Float32x4.add(SIMD.Float32x4.mul(e,i),S),i=SIMD.Float32x4.swizzle(i,2,3,0,1),h=SIMD.Float32x4.add(SIMD.Float32x4.mul(M,i),h),S=SIMD.Float32x4.sub(S,SIMD.Float32x4.mul(e,i)),i=SIMD.Float32x4.mul(l,M),i=SIMD.Float32x4.swizzle(i,1,0,3,2),h=SIMD.Float32x4.add(SIMD.Float32x4.mul(s,i),h),I=SIMD.Float32x4.sub(I,SIMD.Float32x4.mul(e,i)),i=SIMD.Float32x4.swizzle(i,2,3,0,1),h=SIMD.Float32x4.sub(h,SIMD.Float32x4.mul(s,i)),I=SIMD.Float32x4.add(SIMD.Float32x4.mul(e,i),I),SIMD.Float32x4.store(t,0,c),SIMD.Float32x4.store(t,4,h),SIMD.Float32x4.store(t,8,S),SIMD.Float32x4.store(t,12,I),t},o.adjoint=r.USE_SIMD?o.SIMD.adjoint:o.scalar.adjoint,o.determinant=function(t){var a=t[0],n=t[1],r=t[2],o=t[3],u=t[4],l=t[5],e=t[6],M=t[7],s=t[8],i=t[9],c=t[10],h=t[11],S=t[12],I=t[13],f=t[14],x=t[15],D=a*l-n*u,F=a*e-r*u,m=a*M-o*u,d=n*e-r*l,b=n*M-o*l,v=r*M-o*e,z=s*I-i*S,p=s*f-c*S,w=s*x-h*S,E=i*f-c*I,A=i*x-h*I,P=c*x-h*f;return D*P-F*A+m*E+d*w-b*p+v*z},o.SIMD.multiply=function(t,a,n){var 
r=SIMD.Float32x4.load(a,0),o=SIMD.Float32x4.load(a,4),u=SIMD.Float32x4.load(a,8),l=SIMD.Float32x4.load(a,12),e=SIMD.Float32x4.load(n,0),M=SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(e,0,0,0,0),r),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(e,1,1,1,1),o),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(e,2,2,2,2),u),SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(e,3,3,3,3),l))));SIMD.Float32x4.store(t,0,M);var s=SIMD.Float32x4.load(n,4),i=SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(s,0,0,0,0),r),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(s,1,1,1,1),o),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(s,2,2,2,2),u),SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(s,3,3,3,3),l))));SIMD.Float32x4.store(t,4,i);var c=SIMD.Float32x4.load(n,8),h=SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(c,0,0,0,0),r),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(c,1,1,1,1),o),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(c,2,2,2,2),u),SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(c,3,3,3,3),l))));SIMD.Float32x4.store(t,8,h);var S=SIMD.Float32x4.load(n,12),I=SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(S,0,0,0,0),r),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(S,1,1,1,1),o),SIMD.Float32x4.add(SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(S,2,2,2,2),u),SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(S,3,3,3,3),l))));return SIMD.Float32x4.store(t,12,I),t},o.scalar.multiply=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=a[6],i=a[7],c=a[8],h=a[9],S=a[10],I=a[11],f=a[12],x=a[13],D=a[14],F=a[15],m=n[0],d=n[1],b=n[2],v=n[3];return t[0]=m*r+d*e+b*c+v*f,t[1]=m*o+d*M+b*h+v*x,t[2]=m*u+d*s+b*S+v*D,t[3]=m*l+d*i+b*I+v*F,m=n[4],d=n[5],b=n[6],v=n[7],t[4]=m*r+d*e+b*c+v*f,t[5]=m*o+d*M+b*h+v*x,t[6]=m*u+d*s+b*S+v*D,t[7]=m*l+d*i+b*I+v*F,m=n[8],d=n[9],b=n[10],v=n[11],t[8]=m*r+d*e+b*c+v*f,t[9]=m*o+d*M+b*h+v*x,t[10]=m*u+d*s+b*S+v*D,t[11]=m*l+d*i+b*I+v*F,m=n[12],d=n[13],b=n[14],v=n[15],t[12]=m*r+d*e+b*c+v*f,t[13]=m*o+d*M+b*h+v*x,t[14]=m*u+d*s+b*S+v*D,t[15]=m*l+d*i+b*I+v*F,t},o.multiply=r.USE_SIMD?o.SIMD.multiply:o.scalar.multiply,o.mul=o.multiply,o.scalar.translate=function(t,a,n){var r,o,u,l,e,M,s,i,c,h,S,I,f=n[0],x=n[1],D=n[2];return a===t?(t[12]=a[0]*f+a[4]*x+a[8]*D+a[12],t[13]=a[1]*f+a[5]*x+a[9]*D+a[13],t[14]=a[2]*f+a[6]*x+a[10]*D+a[14],t[15]=a[3]*f+a[7]*x+a[11]*D+a[15]):(r=a[0],o=a[1],u=a[2],l=a[3],e=a[4],M=a[5],s=a[6],i=a[7],c=a[8],h=a[9],S=a[10],I=a[11],t[0]=r,t[1]=o,t[2]=u,t[3]=l,t[4]=e,t[5]=M,t[6]=s,t[7]=i,t[8]=c,t[9]=h,t[10]=S,t[11]=I,t[12]=r*f+e*x+c*D+a[12],t[13]=o*f+M*x+h*D+a[13],t[14]=u*f+s*x+S*D+a[14],t[15]=l*f+i*x+I*D+a[15]),t},o.SIMD.translate=function(t,a,n){var r=SIMD.Float32x4.load(a,0),o=SIMD.Float32x4.load(a,4),u=SIMD.Float32x4.load(a,8),l=SIMD.Float32x4.load(a,12),e=SIMD.Float32x4(n[0],n[1],n[2],0);a!==t&&(t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t[4]=a[4],t[5]=a[5],t[6]=a[6],t[7]=a[7],t[8]=a[8],t[9]=a[9],t[10]=a[10],t[11]=a[11]),r=SIMD.Float32x4.mul(r,SIMD.Float32x4.swizzle(e,0,0,0,0)),o=SIMD.Float32x4.mul(o,SIMD.Float32x4.swizzle(e,1,1,1,1)),u=SIMD.Float32x4.mul(u,SIMD.Float32x4.swizzle(e,2,2,2,2));var M=SIMD.Float32x4.add(r,SIMD.Float32x4.add(o,SIMD.Float32x4.add(u,l)));return SIMD.Float32x4.store(t,12,M),t},o.translate=r.USE_SIMD?o.SIMD.translate:o.scalar.translate,o.scalar.scale=function(t,a,n){var r=n[0],o=n[1],u=n[2];return 
t[0]=a[0]*r,t[1]=a[1]*r,t[2]=a[2]*r,t[3]=a[3]*r,t[4]=a[4]*o,t[5]=a[5]*o,t[6]=a[6]*o,t[7]=a[7]*o,t[8]=a[8]*u,t[9]=a[9]*u,t[10]=a[10]*u,t[11]=a[11]*u,t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15],t},o.SIMD.scale=function(t,a,n){var r,o,u,l=SIMD.Float32x4(n[0],n[1],n[2],0);return r=SIMD.Float32x4.load(a,0),SIMD.Float32x4.store(t,0,SIMD.Float32x4.mul(r,SIMD.Float32x4.swizzle(l,0,0,0,0))),o=SIMD.Float32x4.load(a,4),SIMD.Float32x4.store(t,4,SIMD.Float32x4.mul(o,SIMD.Float32x4.swizzle(l,1,1,1,1))),u=SIMD.Float32x4.load(a,8),SIMD.Float32x4.store(t,8,SIMD.Float32x4.mul(u,SIMD.Float32x4.swizzle(l,2,2,2,2))),t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15],t},o.scale=r.USE_SIMD?o.SIMD.scale:o.scalar.scale,o.rotate=function(t,a,n,o){var u,l,e,M,s,i,c,h,S,I,f,x,D,F,m,d,b,v,z,p,w,E,A,P,L=o[0],q=o[1],R=o[2],N=Math.sqrt(L*L+q*q+R*R);return Math.abs(N)<r.EPSILON?null:(N=1/N,L*=N,q*=N,R*=N,u=Math.sin(n),l=Math.cos(n),e=1-l,M=a[0],s=a[1],i=a[2],c=a[3],h=a[4],S=a[5],I=a[6],f=a[7],x=a[8],D=a[9],F=a[10],m=a[11],d=L*L*e+l,b=q*L*e+R*u,v=R*L*e-q*u,z=L*q*e-R*u,p=q*q*e+l,w=R*q*e+L*u,E=L*R*e+q*u,A=q*R*e-L*u,P=R*R*e+l,t[0]=M*d+h*b+x*v,t[1]=s*d+S*b+D*v,t[2]=i*d+I*b+F*v,t[3]=c*d+f*b+m*v,t[4]=M*z+h*p+x*w,t[5]=s*z+S*p+D*w,t[6]=i*z+I*p+F*w,t[7]=c*z+f*p+m*w,t[8]=M*E+h*A+x*P,t[9]=s*E+S*A+D*P,t[10]=i*E+I*A+F*P,t[11]=c*E+f*A+m*P,a!==t&&(t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15]),t)},o.scalar.rotateX=function(t,a,n){var r=Math.sin(n),o=Math.cos(n),u=a[4],l=a[5],e=a[6],M=a[7],s=a[8],i=a[9],c=a[10],h=a[11];return a!==t&&(t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15]),t[4]=u*o+s*r,t[5]=l*o+i*r,t[6]=e*o+c*r,t[7]=M*o+h*r,t[8]=s*o-u*r,t[9]=i*o-l*r,t[10]=c*o-e*r,t[11]=h*o-M*r,t},o.SIMD.rotateX=function(t,a,n){var r=SIMD.Float32x4.splat(Math.sin(n)),o=SIMD.Float32x4.splat(Math.cos(n));a!==t&&(t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15]);var u=SIMD.Float32x4.load(a,4),l=SIMD.Float32x4.load(a,8);return SIMD.Float32x4.store(t,4,SIMD.Float32x4.add(SIMD.Float32x4.mul(u,o),SIMD.Float32x4.mul(l,r))),SIMD.Float32x4.store(t,8,SIMD.Float32x4.sub(SIMD.Float32x4.mul(l,o),SIMD.Float32x4.mul(u,r))),t},o.rotateX=r.USE_SIMD?o.SIMD.rotateX:o.scalar.rotateX,o.scalar.rotateY=function(t,a,n){var r=Math.sin(n),o=Math.cos(n),u=a[0],l=a[1],e=a[2],M=a[3],s=a[8],i=a[9],c=a[10],h=a[11];return a!==t&&(t[4]=a[4],t[5]=a[5],t[6]=a[6],t[7]=a[7],t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15]),t[0]=u*o-s*r,t[1]=l*o-i*r,t[2]=e*o-c*r,t[3]=M*o-h*r,t[8]=u*r+s*o,t[9]=l*r+i*o,t[10]=e*r+c*o,t[11]=M*r+h*o,t},o.SIMD.rotateY=function(t,a,n){var r=SIMD.Float32x4.splat(Math.sin(n)),o=SIMD.Float32x4.splat(Math.cos(n));a!==t&&(t[4]=a[4],t[5]=a[5],t[6]=a[6],t[7]=a[7],t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15]);var u=SIMD.Float32x4.load(a,0),l=SIMD.Float32x4.load(a,8);return SIMD.Float32x4.store(t,0,SIMD.Float32x4.sub(SIMD.Float32x4.mul(u,o),SIMD.Float32x4.mul(l,r))),SIMD.Float32x4.store(t,8,SIMD.Float32x4.add(SIMD.Float32x4.mul(u,r),SIMD.Float32x4.mul(l,o))),t},o.rotateY=r.USE_SIMD?o.SIMD.rotateY:o.scalar.rotateY,o.scalar.rotateZ=function(t,a,n){var r=Math.sin(n),o=Math.cos(n),u=a[0],l=a[1],e=a[2],M=a[3],s=a[4],i=a[5],c=a[6],h=a[7];return a!==t&&(t[8]=a[8],t[9]=a[9],t[10]=a[10],t[11]=a[11],t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15]),t[0]=u*o+s*r,t[1]=l*o+i*r,t[2]=e*o+c*r,t[3]=M*o+h*r,t[4]=s*o-u*r,t[5]=i*o-l*r,t[6]=c*o-e*r,t[7]=h*o-M*r,t},o.SIMD.rotateZ=function(t,a,n){var 
r=SIMD.Float32x4.splat(Math.sin(n)),o=SIMD.Float32x4.splat(Math.cos(n));a!==t&&(t[8]=a[8],t[9]=a[9],t[10]=a[10],t[11]=a[11],t[12]=a[12],t[13]=a[13],t[14]=a[14],t[15]=a[15]);var u=SIMD.Float32x4.load(a,0),l=SIMD.Float32x4.load(a,4);return SIMD.Float32x4.store(t,0,SIMD.Float32x4.add(SIMD.Float32x4.mul(u,o),SIMD.Float32x4.mul(l,r))),SIMD.Float32x4.store(t,4,SIMD.Float32x4.sub(SIMD.Float32x4.mul(l,o),SIMD.Float32x4.mul(u,r))),t},o.rotateZ=r.USE_SIMD?o.SIMD.rotateZ:o.scalar.rotateZ,o.fromTranslation=function(t,a){return t[0]=1,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=1,t[6]=0,t[7]=0,t[8]=0,t[9]=0,t[10]=1,t[11]=0,t[12]=a[0],t[13]=a[1],t[14]=a[2],t[15]=1,t},o.fromScaling=function(t,a){return t[0]=a[0],t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=a[1],t[6]=0,t[7]=0,t[8]=0,t[9]=0,t[10]=a[2],t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t},o.fromRotation=function(t,a,n){var o,u,l,e=n[0],M=n[1],s=n[2],i=Math.sqrt(e*e+M*M+s*s);return Math.abs(i)<r.EPSILON?null:(i=1/i,e*=i,M*=i,s*=i,o=Math.sin(a),u=Math.cos(a),l=1-u,t[0]=e*e*l+u,t[1]=M*e*l+s*o,t[2]=s*e*l-M*o,t[3]=0,t[4]=e*M*l-s*o,t[5]=M*M*l+u,t[6]=s*M*l+e*o,t[7]=0,t[8]=e*s*l+M*o,t[9]=M*s*l-e*o,t[10]=s*s*l+u,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t)},o.fromXRotation=function(t,a){var n=Math.sin(a),r=Math.cos(a);return t[0]=1,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=r,t[6]=n,t[7]=0,t[8]=0,t[9]=-n,t[10]=r,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t},o.fromYRotation=function(t,a){var n=Math.sin(a),r=Math.cos(a);return t[0]=r,t[1]=0,t[2]=-n,t[3]=0,t[4]=0,t[5]=1,t[6]=0,t[7]=0,t[8]=n,t[9]=0,t[10]=r,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t},o.fromZRotation=function(t,a){var n=Math.sin(a),r=Math.cos(a);return t[0]=r,t[1]=n,t[2]=0,t[3]=0,t[4]=-n,t[5]=r,t[6]=0,t[7]=0,t[8]=0,t[9]=0,t[10]=1,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t},o.fromRotationTranslation=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=r+r,M=o+o,s=u+u,i=r*e,c=r*M,h=r*s,S=o*M,I=o*s,f=u*s,x=l*e,D=l*M,F=l*s;return t[0]=1-(S+f),t[1]=c+F,t[2]=h-D,t[3]=0,t[4]=c-F,t[5]=1-(i+f),t[6]=I+x,t[7]=0,t[8]=h+D,t[9]=I-x,t[10]=1-(i+S),t[11]=0,t[12]=n[0],t[13]=n[1],t[14]=n[2],t[15]=1,t},o.getTranslation=function(t,a){return t[0]=a[12],t[1]=a[13],t[2]=a[14],t},o.getRotation=function(t,a){var n=a[0]+a[5]+a[10],r=0;return n>0?(r=2*Math.sqrt(n+1),t[3]=.25*r,t[0]=(a[6]-a[9])/r,t[1]=(a[8]-a[2])/r,t[2]=(a[1]-a[4])/r):a[0]>a[5]&a[0]>a[10]?(r=2*Math.sqrt(1+a[0]-a[5]-a[10]),t[3]=(a[6]-a[9])/r,t[0]=.25*r,t[1]=(a[1]+a[4])/r,t[2]=(a[8]+a[2])/r):a[5]>a[10]?(r=2*Math.sqrt(1+a[5]-a[0]-a[10]),t[3]=(a[8]-a[2])/r,t[0]=(a[1]+a[4])/r,t[1]=.25*r,t[2]=(a[6]+a[9])/r):(r=2*Math.sqrt(1+a[10]-a[0]-a[5]),t[3]=(a[1]-a[4])/r,t[0]=(a[8]+a[2])/r,t[1]=(a[6]+a[9])/r,t[2]=.25*r),t},o.fromRotationTranslationScale=function(t,a,n,r){var o=a[0],u=a[1],l=a[2],e=a[3],M=o+o,s=u+u,i=l+l,c=o*M,h=o*s,S=o*i,I=u*s,f=u*i,x=l*i,D=e*M,F=e*s,m=e*i,d=r[0],b=r[1],v=r[2];return t[0]=(1-(I+x))*d,t[1]=(h+m)*d,t[2]=(S-F)*d,t[3]=0,t[4]=(h-m)*b,t[5]=(1-(c+x))*b,t[6]=(f+D)*b,t[7]=0,t[8]=(S+F)*v,t[9]=(f-D)*v,t[10]=(1-(c+I))*v,t[11]=0,t[12]=n[0],t[13]=n[1],t[14]=n[2],t[15]=1,t},o.fromRotationTranslationScaleOrigin=function(t,a,n,r,o){
+var u=a[0],l=a[1],e=a[2],M=a[3],s=u+u,i=l+l,c=e+e,h=u*s,S=u*i,I=u*c,f=l*i,x=l*c,D=e*c,F=M*s,m=M*i,d=M*c,b=r[0],v=r[1],z=r[2],p=o[0],w=o[1],E=o[2];return t[0]=(1-(f+D))*b,t[1]=(S+d)*b,t[2]=(I-m)*b,t[3]=0,t[4]=(S-d)*v,t[5]=(1-(h+D))*v,t[6]=(x+F)*v,t[7]=0,t[8]=(I+m)*z,t[9]=(x-F)*z,t[10]=(1-(h+f))*z,t[11]=0,t[12]=n[0]+p-(t[0]*p+t[4]*w+t[8]*E),t[13]=n[1]+w-(t[1]*p+t[5]*w+t[9]*E),t[14]=n[2]+E-(t[2]*p+t[6]*w+t[10]*E),t[15]=1,t},o.fromQuat=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=n+n,e=r+r,M=o+o,s=n*l,i=r*l,c=r*e,h=o*l,S=o*e,I=o*M,f=u*l,x=u*e,D=u*M;return t[0]=1-c-I,t[1]=i+D,t[2]=h-x,t[3]=0,t[4]=i-D,t[5]=1-s-I,t[6]=S+f,t[7]=0,t[8]=h+x,t[9]=S-f,t[10]=1-s-c,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t},o.frustum=function(t,a,n,r,o,u,l){var e=1/(n-a),M=1/(o-r),s=1/(u-l);return t[0]=2*u*e,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=2*u*M,t[6]=0,t[7]=0,t[8]=(n+a)*e,t[9]=(o+r)*M,t[10]=(l+u)*s,t[11]=-1,t[12]=0,t[13]=0,t[14]=l*u*2*s,t[15]=0,t},o.perspective=function(t,a,n,r,o){var u=1/Math.tan(a/2),l=1/(r-o);return t[0]=u/n,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=u,t[6]=0,t[7]=0,t[8]=0,t[9]=0,t[10]=(o+r)*l,t[11]=-1,t[12]=0,t[13]=0,t[14]=2*o*r*l,t[15]=0,t},o.perspectiveFromFieldOfView=function(t,a,n,r){var o=Math.tan(a.upDegrees*Math.PI/180),u=Math.tan(a.downDegrees*Math.PI/180),l=Math.tan(a.leftDegrees*Math.PI/180),e=Math.tan(a.rightDegrees*Math.PI/180),M=2/(l+e),s=2/(o+u);return t[0]=M,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=s,t[6]=0,t[7]=0,t[8]=-((l-e)*M*.5),t[9]=(o-u)*s*.5,t[10]=r/(n-r),t[11]=-1,t[12]=0,t[13]=0,t[14]=r*n/(n-r),t[15]=0,t},o.ortho=function(t,a,n,r,o,u,l){var e=1/(a-n),M=1/(r-o),s=1/(u-l);return t[0]=-2*e,t[1]=0,t[2]=0,t[3]=0,t[4]=0,t[5]=-2*M,t[6]=0,t[7]=0,t[8]=0,t[9]=0,t[10]=2*s,t[11]=0,t[12]=(a+n)*e,t[13]=(o+r)*M,t[14]=(l+u)*s,t[15]=1,t},o.lookAt=function(t,a,n,u){var l,e,M,s,i,c,h,S,I,f,x=a[0],D=a[1],F=a[2],m=u[0],d=u[1],b=u[2],v=n[0],z=n[1],p=n[2];return Math.abs(x-v)<r.EPSILON&&Math.abs(D-z)<r.EPSILON&&Math.abs(F-p)<r.EPSILON?o.identity(t):(h=x-v,S=D-z,I=F-p,f=1/Math.sqrt(h*h+S*S+I*I),h*=f,S*=f,I*=f,l=d*I-b*S,e=b*h-m*I,M=m*S-d*h,f=Math.sqrt(l*l+e*e+M*M),f?(f=1/f,l*=f,e*=f,M*=f):(l=0,e=0,M=0),s=S*M-I*e,i=I*l-h*M,c=h*e-S*l,f=Math.sqrt(s*s+i*i+c*c),f?(f=1/f,s*=f,i*=f,c*=f):(s=0,i=0,c=0),t[0]=l,t[1]=s,t[2]=h,t[3]=0,t[4]=e,t[5]=i,t[6]=S,t[7]=0,t[8]=M,t[9]=c,t[10]=I,t[11]=0,t[12]=-(l*x+e*D+M*F),t[13]=-(s*x+i*D+c*F),t[14]=-(h*x+S*D+I*F),t[15]=1,t)},o.str=function(t){return"mat4("+t[0]+", "+t[1]+", "+t[2]+", "+t[3]+", "+t[4]+", "+t[5]+", "+t[6]+", "+t[7]+", "+t[8]+", "+t[9]+", "+t[10]+", "+t[11]+", "+t[12]+", "+t[13]+", "+t[14]+", "+t[15]+")"},o.frob=function(t){return Math.sqrt(Math.pow(t[0],2)+Math.pow(t[1],2)+Math.pow(t[2],2)+Math.pow(t[3],2)+Math.pow(t[4],2)+Math.pow(t[5],2)+Math.pow(t[6],2)+Math.pow(t[7],2)+Math.pow(t[8],2)+Math.pow(t[9],2)+Math.pow(t[10],2)+Math.pow(t[11],2)+Math.pow(t[12],2)+Math.pow(t[13],2)+Math.pow(t[14],2)+Math.pow(t[15],2))},o.add=function(t,a,n){return t[0]=a[0]+n[0],t[1]=a[1]+n[1],t[2]=a[2]+n[2],t[3]=a[3]+n[3],t[4]=a[4]+n[4],t[5]=a[5]+n[5],t[6]=a[6]+n[6],t[7]=a[7]+n[7],t[8]=a[8]+n[8],t[9]=a[9]+n[9],t[10]=a[10]+n[10],t[11]=a[11]+n[11],t[12]=a[12]+n[12],t[13]=a[13]+n[13],t[14]=a[14]+n[14],t[15]=a[15]+n[15],t},o.subtract=function(t,a,n){return t[0]=a[0]-n[0],t[1]=a[1]-n[1],t[2]=a[2]-n[2],t[3]=a[3]-n[3],t[4]=a[4]-n[4],t[5]=a[5]-n[5],t[6]=a[6]-n[6],t[7]=a[7]-n[7],t[8]=a[8]-n[8],t[9]=a[9]-n[9],t[10]=a[10]-n[10],t[11]=a[11]-n[11],t[12]=a[12]-n[12],t[13]=a[13]-n[13],t[14]=a[14]-n[14],t[15]=a[15]-n[15],t},o.sub=o.subtract,o.multiplyScalar=function(t,a,n){return 
t[0]=a[0]*n,t[1]=a[1]*n,t[2]=a[2]*n,t[3]=a[3]*n,t[4]=a[4]*n,t[5]=a[5]*n,t[6]=a[6]*n,t[7]=a[7]*n,t[8]=a[8]*n,t[9]=a[9]*n,t[10]=a[10]*n,t[11]=a[11]*n,t[12]=a[12]*n,t[13]=a[13]*n,t[14]=a[14]*n,t[15]=a[15]*n,t},o.multiplyScalarAndAdd=function(t,a,n,r){return t[0]=a[0]+n[0]*r,t[1]=a[1]+n[1]*r,t[2]=a[2]+n[2]*r,t[3]=a[3]+n[3]*r,t[4]=a[4]+n[4]*r,t[5]=a[5]+n[5]*r,t[6]=a[6]+n[6]*r,t[7]=a[7]+n[7]*r,t[8]=a[8]+n[8]*r,t[9]=a[9]+n[9]*r,t[10]=a[10]+n[10]*r,t[11]=a[11]+n[11]*r,t[12]=a[12]+n[12]*r,t[13]=a[13]+n[13]*r,t[14]=a[14]+n[14]*r,t[15]=a[15]+n[15]*r,t},o.exactEquals=function(t,a){return t[0]===a[0]&&t[1]===a[1]&&t[2]===a[2]&&t[3]===a[3]&&t[4]===a[4]&&t[5]===a[5]&&t[6]===a[6]&&t[7]===a[7]&&t[8]===a[8]&&t[9]===a[9]&&t[10]===a[10]&&t[11]===a[11]&&t[12]===a[12]&&t[13]===a[13]&&t[14]===a[14]&&t[15]===a[15]},o.equals=function(t,a){var n=t[0],o=t[1],u=t[2],l=t[3],e=t[4],M=t[5],s=t[6],i=t[7],c=t[8],h=t[9],S=t[10],I=t[11],f=t[12],x=t[13],D=t[14],F=t[15],m=a[0],d=a[1],b=a[2],v=a[3],z=a[4],p=a[5],w=a[6],E=a[7],A=a[8],P=a[9],L=a[10],q=a[11],R=a[12],N=a[13],O=a[14],Y=a[15];return Math.abs(n-m)<=r.EPSILON*Math.max(1,Math.abs(n),Math.abs(m))&&Math.abs(o-d)<=r.EPSILON*Math.max(1,Math.abs(o),Math.abs(d))&&Math.abs(u-b)<=r.EPSILON*Math.max(1,Math.abs(u),Math.abs(b))&&Math.abs(l-v)<=r.EPSILON*Math.max(1,Math.abs(l),Math.abs(v))&&Math.abs(e-z)<=r.EPSILON*Math.max(1,Math.abs(e),Math.abs(z))&&Math.abs(M-p)<=r.EPSILON*Math.max(1,Math.abs(M),Math.abs(p))&&Math.abs(s-w)<=r.EPSILON*Math.max(1,Math.abs(s),Math.abs(w))&&Math.abs(i-E)<=r.EPSILON*Math.max(1,Math.abs(i),Math.abs(E))&&Math.abs(c-A)<=r.EPSILON*Math.max(1,Math.abs(c),Math.abs(A))&&Math.abs(h-P)<=r.EPSILON*Math.max(1,Math.abs(h),Math.abs(P))&&Math.abs(S-L)<=r.EPSILON*Math.max(1,Math.abs(S),Math.abs(L))&&Math.abs(I-q)<=r.EPSILON*Math.max(1,Math.abs(I),Math.abs(q))&&Math.abs(f-R)<=r.EPSILON*Math.max(1,Math.abs(f),Math.abs(R))&&Math.abs(x-N)<=r.EPSILON*Math.max(1,Math.abs(x),Math.abs(N))&&Math.abs(D-O)<=r.EPSILON*Math.max(1,Math.abs(D),Math.abs(O))&&Math.abs(F-Y)<=r.EPSILON*Math.max(1,Math.abs(F),Math.abs(Y))},t.exports=o},function(t,a,n){var r=n(1),o=n(4),u=n(7),l=n(8),e={};e.create=function(){var t=new r.ARRAY_TYPE(4);return t[0]=0,t[1]=0,t[2]=0,t[3]=1,t},e.rotationTo=function(){var t=u.create(),a=u.fromValues(1,0,0),n=u.fromValues(0,1,0);return function(r,o,l){var M=u.dot(o,l);return-.999999>M?(u.cross(t,a,o),u.length(t)<1e-6&&u.cross(t,n,o),u.normalize(t,t),e.setAxisAngle(r,t,Math.PI),r):M>.999999?(r[0]=0,r[1]=0,r[2]=0,r[3]=1,r):(u.cross(t,o,l),r[0]=t[0],r[1]=t[1],r[2]=t[2],r[3]=1+M,e.normalize(r,r))}}(),e.setAxes=function(){var t=o.create();return function(a,n,r,o){return t[0]=r[0],t[3]=r[1],t[6]=r[2],t[1]=o[0],t[4]=o[1],t[7]=o[2],t[2]=-n[0],t[5]=-n[1],t[8]=-n[2],e.normalize(a,e.fromMat3(a,t))}}(),e.clone=l.clone,e.fromValues=l.fromValues,e.copy=l.copy,e.set=l.set,e.identity=function(t){return t[0]=0,t[1]=0,t[2]=0,t[3]=1,t},e.setAxisAngle=function(t,a,n){n=.5*n;var r=Math.sin(n);return t[0]=r*a[0],t[1]=r*a[1],t[2]=r*a[2],t[3]=Math.cos(n),t},e.getAxisAngle=function(t,a){var n=2*Math.acos(a[3]),r=Math.sin(n/2);return 0!=r?(t[0]=a[0]/r,t[1]=a[1]/r,t[2]=a[2]/r):(t[0]=1,t[1]=0,t[2]=0),n},e.add=l.add,e.multiply=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3],e=n[0],M=n[1],s=n[2],i=n[3];return t[0]=r*i+l*e+o*s-u*M,t[1]=o*i+l*M+u*e-r*s,t[2]=u*i+l*s+r*M-o*e,t[3]=l*i-r*e-o*M-u*s,t},e.mul=e.multiply,e.scale=l.scale,e.rotateX=function(t,a,n){n*=.5;var r=a[0],o=a[1],u=a[2],l=a[3],e=Math.sin(n),M=Math.cos(n);return 
t[0]=r*M+l*e,t[1]=o*M+u*e,t[2]=u*M-o*e,t[3]=l*M-r*e,t},e.rotateY=function(t,a,n){n*=.5;var r=a[0],o=a[1],u=a[2],l=a[3],e=Math.sin(n),M=Math.cos(n);return t[0]=r*M-u*e,t[1]=o*M+l*e,t[2]=u*M+r*e,t[3]=l*M-o*e,t},e.rotateZ=function(t,a,n){n*=.5;var r=a[0],o=a[1],u=a[2],l=a[3],e=Math.sin(n),M=Math.cos(n);return t[0]=r*M+o*e,t[1]=o*M-r*e,t[2]=u*M+l*e,t[3]=l*M-u*e,t},e.calculateW=function(t,a){var n=a[0],r=a[1],o=a[2];return t[0]=n,t[1]=r,t[2]=o,t[3]=Math.sqrt(Math.abs(1-n*n-r*r-o*o)),t},e.dot=l.dot,e.lerp=l.lerp,e.slerp=function(t,a,n,r){var o,u,l,e,M,s=a[0],i=a[1],c=a[2],h=a[3],S=n[0],I=n[1],f=n[2],x=n[3];return u=s*S+i*I+c*f+h*x,0>u&&(u=-u,S=-S,I=-I,f=-f,x=-x),1-u>1e-6?(o=Math.acos(u),l=Math.sin(o),e=Math.sin((1-r)*o)/l,M=Math.sin(r*o)/l):(e=1-r,M=r),t[0]=e*s+M*S,t[1]=e*i+M*I,t[2]=e*c+M*f,t[3]=e*h+M*x,t},e.sqlerp=function(){var t=e.create(),a=e.create();return function(n,r,o,u,l,M){return e.slerp(t,r,l,M),e.slerp(a,o,u,M),e.slerp(n,t,a,2*M*(1-M)),n}}(),e.invert=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=n*n+r*r+o*o+u*u,e=l?1/l:0;return t[0]=-n*e,t[1]=-r*e,t[2]=-o*e,t[3]=u*e,t},e.conjugate=function(t,a){return t[0]=-a[0],t[1]=-a[1],t[2]=-a[2],t[3]=a[3],t},e.length=l.length,e.len=e.length,e.squaredLength=l.squaredLength,e.sqrLen=e.squaredLength,e.normalize=l.normalize,e.fromMat3=function(t,a){var n,r=a[0]+a[4]+a[8];if(r>0)n=Math.sqrt(r+1),t[3]=.5*n,n=.5/n,t[0]=(a[5]-a[7])*n,t[1]=(a[6]-a[2])*n,t[2]=(a[1]-a[3])*n;else{var o=0;a[4]>a[0]&&(o=1),a[8]>a[3*o+o]&&(o=2);var u=(o+1)%3,l=(o+2)%3;n=Math.sqrt(a[3*o+o]-a[3*u+u]-a[3*l+l]+1),t[o]=.5*n,n=.5/n,t[3]=(a[3*u+l]-a[3*l+u])*n,t[u]=(a[3*u+o]+a[3*o+u])*n,t[l]=(a[3*l+o]+a[3*o+l])*n}return t},e.str=function(t){return"quat("+t[0]+", "+t[1]+", "+t[2]+", "+t[3]+")"},e.exactEquals=l.exactEquals,e.equals=l.equals,t.exports=e},function(t,a,n){var r=n(1),o={};o.create=function(){var t=new r.ARRAY_TYPE(3);return t[0]=0,t[1]=0,t[2]=0,t},o.clone=function(t){var a=new r.ARRAY_TYPE(3);return a[0]=t[0],a[1]=t[1],a[2]=t[2],a},o.fromValues=function(t,a,n){var o=new r.ARRAY_TYPE(3);return o[0]=t,o[1]=a,o[2]=n,o},o.copy=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=a[2],t},o.set=function(t,a,n,r){return t[0]=a,t[1]=n,t[2]=r,t},o.add=function(t,a,n){return t[0]=a[0]+n[0],t[1]=a[1]+n[1],t[2]=a[2]+n[2],t},o.subtract=function(t,a,n){return t[0]=a[0]-n[0],t[1]=a[1]-n[1],t[2]=a[2]-n[2],t},o.sub=o.subtract,o.multiply=function(t,a,n){return t[0]=a[0]*n[0],t[1]=a[1]*n[1],t[2]=a[2]*n[2],t},o.mul=o.multiply,o.divide=function(t,a,n){return t[0]=a[0]/n[0],t[1]=a[1]/n[1],t[2]=a[2]/n[2],t},o.div=o.divide,o.ceil=function(t,a){return t[0]=Math.ceil(a[0]),t[1]=Math.ceil(a[1]),t[2]=Math.ceil(a[2]),t},o.floor=function(t,a){return t[0]=Math.floor(a[0]),t[1]=Math.floor(a[1]),t[2]=Math.floor(a[2]),t},o.min=function(t,a,n){return t[0]=Math.min(a[0],n[0]),t[1]=Math.min(a[1],n[1]),t[2]=Math.min(a[2],n[2]),t},o.max=function(t,a,n){return t[0]=Math.max(a[0],n[0]),t[1]=Math.max(a[1],n[1]),t[2]=Math.max(a[2],n[2]),t},o.round=function(t,a){return t[0]=Math.round(a[0]),t[1]=Math.round(a[1]),t[2]=Math.round(a[2]),t},o.scale=function(t,a,n){return t[0]=a[0]*n,t[1]=a[1]*n,t[2]=a[2]*n,t},o.scaleAndAdd=function(t,a,n,r){return t[0]=a[0]+n[0]*r,t[1]=a[1]+n[1]*r,t[2]=a[2]+n[2]*r,t},o.distance=function(t,a){var n=a[0]-t[0],r=a[1]-t[1],o=a[2]-t[2];return Math.sqrt(n*n+r*r+o*o)},o.dist=o.distance,o.squaredDistance=function(t,a){var n=a[0]-t[0],r=a[1]-t[1],o=a[2]-t[2];return n*n+r*r+o*o},o.sqrDist=o.squaredDistance,o.length=function(t){var a=t[0],n=t[1],r=t[2];return 
Math.sqrt(a*a+n*n+r*r)},o.len=o.length,o.squaredLength=function(t){var a=t[0],n=t[1],r=t[2];return a*a+n*n+r*r},o.sqrLen=o.squaredLength,o.negate=function(t,a){return t[0]=-a[0],t[1]=-a[1],t[2]=-a[2],t},o.inverse=function(t,a){return t[0]=1/a[0],t[1]=1/a[1],t[2]=1/a[2],t},o.normalize=function(t,a){var n=a[0],r=a[1],o=a[2],u=n*n+r*r+o*o;return u>0&&(u=1/Math.sqrt(u),t[0]=a[0]*u,t[1]=a[1]*u,t[2]=a[2]*u),t},o.dot=function(t,a){return t[0]*a[0]+t[1]*a[1]+t[2]*a[2]},o.cross=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=n[0],e=n[1],M=n[2];return t[0]=o*M-u*e,t[1]=u*l-r*M,t[2]=r*e-o*l,t},o.lerp=function(t,a,n,r){var o=a[0],u=a[1],l=a[2];return t[0]=o+r*(n[0]-o),t[1]=u+r*(n[1]-u),t[2]=l+r*(n[2]-l),t},o.hermite=function(t,a,n,r,o,u){var l=u*u,e=l*(2*u-3)+1,M=l*(u-2)+u,s=l*(u-1),i=l*(3-2*u);return t[0]=a[0]*e+n[0]*M+r[0]*s+o[0]*i,t[1]=a[1]*e+n[1]*M+r[1]*s+o[1]*i,t[2]=a[2]*e+n[2]*M+r[2]*s+o[2]*i,t},o.bezier=function(t,a,n,r,o,u){var l=1-u,e=l*l,M=u*u,s=e*l,i=3*u*e,c=3*M*l,h=M*u;return t[0]=a[0]*s+n[0]*i+r[0]*c+o[0]*h,t[1]=a[1]*s+n[1]*i+r[1]*c+o[1]*h,t[2]=a[2]*s+n[2]*i+r[2]*c+o[2]*h,t},o.random=function(t,a){a=a||1;var n=2*r.RANDOM()*Math.PI,o=2*r.RANDOM()-1,u=Math.sqrt(1-o*o)*a;return t[0]=Math.cos(n)*u,t[1]=Math.sin(n)*u,t[2]=o*a,t},o.transformMat4=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=n[3]*r+n[7]*o+n[11]*u+n[15];return l=l||1,t[0]=(n[0]*r+n[4]*o+n[8]*u+n[12])/l,t[1]=(n[1]*r+n[5]*o+n[9]*u+n[13])/l,t[2]=(n[2]*r+n[6]*o+n[10]*u+n[14])/l,t},o.transformMat3=function(t,a,n){var r=a[0],o=a[1],u=a[2];return t[0]=r*n[0]+o*n[3]+u*n[6],t[1]=r*n[1]+o*n[4]+u*n[7],t[2]=r*n[2]+o*n[5]+u*n[8],t},o.transformQuat=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=n[0],e=n[1],M=n[2],s=n[3],i=s*r+e*u-M*o,c=s*o+M*r-l*u,h=s*u+l*o-e*r,S=-l*r-e*o-M*u;return t[0]=i*s+S*-l+c*-M-h*-e,t[1]=c*s+S*-e+h*-l-i*-M,t[2]=h*s+S*-M+i*-e-c*-l,t},o.rotateX=function(t,a,n,r){var o=[],u=[];return o[0]=a[0]-n[0],o[1]=a[1]-n[1],o[2]=a[2]-n[2],u[0]=o[0],u[1]=o[1]*Math.cos(r)-o[2]*Math.sin(r),u[2]=o[1]*Math.sin(r)+o[2]*Math.cos(r),t[0]=u[0]+n[0],t[1]=u[1]+n[1],t[2]=u[2]+n[2],t},o.rotateY=function(t,a,n,r){var o=[],u=[];return o[0]=a[0]-n[0],o[1]=a[1]-n[1],o[2]=a[2]-n[2],u[0]=o[2]*Math.sin(r)+o[0]*Math.cos(r),u[1]=o[1],u[2]=o[2]*Math.cos(r)-o[0]*Math.sin(r),t[0]=u[0]+n[0],t[1]=u[1]+n[1],t[2]=u[2]+n[2],t},o.rotateZ=function(t,a,n,r){var o=[],u=[];return o[0]=a[0]-n[0],o[1]=a[1]-n[1],o[2]=a[2]-n[2],u[0]=o[0]*Math.cos(r)-o[1]*Math.sin(r),u[1]=o[0]*Math.sin(r)+o[1]*Math.cos(r),u[2]=o[2],t[0]=u[0]+n[0],t[1]=u[1]+n[1],t[2]=u[2]+n[2],t},o.forEach=function(){var t=o.create();return function(a,n,r,o,u,l){var e,M;for(n||(n=3),r||(r=0),M=o?Math.min(o*n+r,a.length):a.length,e=r;M>e;e+=n)t[0]=a[e],t[1]=a[e+1],t[2]=a[e+2],u(t,t,l),a[e]=t[0],a[e+1]=t[1],a[e+2]=t[2];return a}}(),o.angle=function(t,a){var n=o.fromValues(t[0],t[1],t[2]),r=o.fromValues(a[0],a[1],a[2]);o.normalize(n,n),o.normalize(r,r);var u=o.dot(n,r);return u>1?0:Math.acos(u)},o.str=function(t){return"vec3("+t[0]+", "+t[1]+", "+t[2]+")"},o.exactEquals=function(t,a){return t[0]===a[0]&&t[1]===a[1]&&t[2]===a[2]},o.equals=function(t,a){var n=t[0],o=t[1],u=t[2],l=a[0],e=a[1],M=a[2];return Math.abs(n-l)<=r.EPSILON*Math.max(1,Math.abs(n),Math.abs(l))&&Math.abs(o-e)<=r.EPSILON*Math.max(1,Math.abs(o),Math.abs(e))&&Math.abs(u-M)<=r.EPSILON*Math.max(1,Math.abs(u),Math.abs(M))},t.exports=o},function(t,a,n){var r=n(1),o={};o.create=function(){var t=new r.ARRAY_TYPE(4);return t[0]=0,t[1]=0,t[2]=0,t[3]=0,t},o.clone=function(t){var a=new r.ARRAY_TYPE(4);return 
a[0]=t[0],a[1]=t[1],a[2]=t[2],a[3]=t[3],a},o.fromValues=function(t,a,n,o){var u=new r.ARRAY_TYPE(4);return u[0]=t,u[1]=a,u[2]=n,u[3]=o,u},o.copy=function(t,a){return t[0]=a[0],t[1]=a[1],t[2]=a[2],t[3]=a[3],t},o.set=function(t,a,n,r,o){return t[0]=a,t[1]=n,t[2]=r,t[3]=o,t},o.add=function(t,a,n){return t[0]=a[0]+n[0],t[1]=a[1]+n[1],t[2]=a[2]+n[2],t[3]=a[3]+n[3],t},o.subtract=function(t,a,n){return t[0]=a[0]-n[0],t[1]=a[1]-n[1],t[2]=a[2]-n[2],t[3]=a[3]-n[3],t},o.sub=o.subtract,o.multiply=function(t,a,n){return t[0]=a[0]*n[0],t[1]=a[1]*n[1],t[2]=a[2]*n[2],t[3]=a[3]*n[3],t},o.mul=o.multiply,o.divide=function(t,a,n){return t[0]=a[0]/n[0],t[1]=a[1]/n[1],t[2]=a[2]/n[2],t[3]=a[3]/n[3],t},o.div=o.divide,o.ceil=function(t,a){return t[0]=Math.ceil(a[0]),t[1]=Math.ceil(a[1]),t[2]=Math.ceil(a[2]),t[3]=Math.ceil(a[3]),t},o.floor=function(t,a){return t[0]=Math.floor(a[0]),t[1]=Math.floor(a[1]),t[2]=Math.floor(a[2]),t[3]=Math.floor(a[3]),t},o.min=function(t,a,n){return t[0]=Math.min(a[0],n[0]),t[1]=Math.min(a[1],n[1]),t[2]=Math.min(a[2],n[2]),t[3]=Math.min(a[3],n[3]),t},o.max=function(t,a,n){return t[0]=Math.max(a[0],n[0]),t[1]=Math.max(a[1],n[1]),t[2]=Math.max(a[2],n[2]),t[3]=Math.max(a[3],n[3]),t},o.round=function(t,a){return t[0]=Math.round(a[0]),t[1]=Math.round(a[1]),t[2]=Math.round(a[2]),t[3]=Math.round(a[3]),t},o.scale=function(t,a,n){return t[0]=a[0]*n,t[1]=a[1]*n,t[2]=a[2]*n,t[3]=a[3]*n,t},o.scaleAndAdd=function(t,a,n,r){return t[0]=a[0]+n[0]*r,t[1]=a[1]+n[1]*r,t[2]=a[2]+n[2]*r,t[3]=a[3]+n[3]*r,t},o.distance=function(t,a){var n=a[0]-t[0],r=a[1]-t[1],o=a[2]-t[2],u=a[3]-t[3];return Math.sqrt(n*n+r*r+o*o+u*u)},o.dist=o.distance,o.squaredDistance=function(t,a){var n=a[0]-t[0],r=a[1]-t[1],o=a[2]-t[2],u=a[3]-t[3];return n*n+r*r+o*o+u*u},o.sqrDist=o.squaredDistance,o.length=function(t){var a=t[0],n=t[1],r=t[2],o=t[3];return Math.sqrt(a*a+n*n+r*r+o*o)},o.len=o.length,o.squaredLength=function(t){var a=t[0],n=t[1],r=t[2],o=t[3];return a*a+n*n+r*r+o*o},o.sqrLen=o.squaredLength,o.negate=function(t,a){return t[0]=-a[0],t[1]=-a[1],t[2]=-a[2],t[3]=-a[3],t},o.inverse=function(t,a){return t[0]=1/a[0],t[1]=1/a[1],t[2]=1/a[2],t[3]=1/a[3],t},o.normalize=function(t,a){var n=a[0],r=a[1],o=a[2],u=a[3],l=n*n+r*r+o*o+u*u;return l>0&&(l=1/Math.sqrt(l),t[0]=n*l,t[1]=r*l,t[2]=o*l,t[3]=u*l),t},o.dot=function(t,a){return t[0]*a[0]+t[1]*a[1]+t[2]*a[2]+t[3]*a[3]},o.lerp=function(t,a,n,r){var o=a[0],u=a[1],l=a[2],e=a[3];return t[0]=o+r*(n[0]-o),t[1]=u+r*(n[1]-u),t[2]=l+r*(n[2]-l),t[3]=e+r*(n[3]-e),t},o.random=function(t,a){return a=a||1,t[0]=r.RANDOM(),t[1]=r.RANDOM(),t[2]=r.RANDOM(),t[3]=r.RANDOM(),o.normalize(t,t),o.scale(t,t,a),t},o.transformMat4=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=a[3];return t[0]=n[0]*r+n[4]*o+n[8]*u+n[12]*l,t[1]=n[1]*r+n[5]*o+n[9]*u+n[13]*l,t[2]=n[2]*r+n[6]*o+n[10]*u+n[14]*l,t[3]=n[3]*r+n[7]*o+n[11]*u+n[15]*l,t},o.transformQuat=function(t,a,n){var r=a[0],o=a[1],u=a[2],l=n[0],e=n[1],M=n[2],s=n[3],i=s*r+e*u-M*o,c=s*o+M*r-l*u,h=s*u+l*o-e*r,S=-l*r-e*o-M*u;return t[0]=i*s+S*-l+c*-M-h*-e,t[1]=c*s+S*-e+h*-l-i*-M,t[2]=h*s+S*-M+i*-e-c*-l,t[3]=a[3],t},o.forEach=function(){var t=o.create();return function(a,n,r,o,u,l){var e,M;for(n||(n=4),r||(r=0),M=o?Math.min(o*n+r,a.length):a.length,e=r;M>e;e+=n)t[0]=a[e],t[1]=a[e+1],t[2]=a[e+2],t[3]=a[e+3],u(t,t,l),a[e]=t[0],a[e+1]=t[1],a[e+2]=t[2],a[e+3]=t[3];return a}}(),o.str=function(t){return"vec4("+t[0]+", "+t[1]+", "+t[2]+", "+t[3]+")"},o.exactEquals=function(t,a){return t[0]===a[0]&&t[1]===a[1]&&t[2]===a[2]&&t[3]===a[3]},o.equals=function(t,a){var 
n=t[0],o=t[1],u=t[2],l=t[3],e=a[0],M=a[1],s=a[2],i=a[3];return Math.abs(n-e)<=r.EPSILON*Math.max(1,Math.abs(n),Math.abs(e))&&Math.abs(o-M)<=r.EPSILON*Math.max(1,Math.abs(o),Math.abs(M))&&Math.abs(u-s)<=r.EPSILON*Math.max(1,Math.abs(u),Math.abs(s))&&Math.abs(l-i)<=r.EPSILON*Math.max(1,Math.abs(l),Math.abs(i))},t.exports=o},function(t,a,n){var r=n(1),o={};o.create=function(){var t=new r.ARRAY_TYPE(2);return t[0]=0,t[1]=0,t},o.clone=function(t){var a=new r.ARRAY_TYPE(2);return a[0]=t[0],a[1]=t[1],a},o.fromValues=function(t,a){var n=new r.ARRAY_TYPE(2);return n[0]=t,n[1]=a,n},o.copy=function(t,a){return t[0]=a[0],t[1]=a[1],t},o.set=function(t,a,n){return t[0]=a,t[1]=n,t},o.add=function(t,a,n){return t[0]=a[0]+n[0],t[1]=a[1]+n[1],t},o.subtract=function(t,a,n){return t[0]=a[0]-n[0],t[1]=a[1]-n[1],t},o.sub=o.subtract,o.multiply=function(t,a,n){return t[0]=a[0]*n[0],t[1]=a[1]*n[1],t},o.mul=o.multiply,o.divide=function(t,a,n){return t[0]=a[0]/n[0],t[1]=a[1]/n[1],t},o.div=o.divide,o.ceil=function(t,a){return t[0]=Math.ceil(a[0]),t[1]=Math.ceil(a[1]),t},o.floor=function(t,a){return t[0]=Math.floor(a[0]),t[1]=Math.floor(a[1]),t},o.min=function(t,a,n){return t[0]=Math.min(a[0],n[0]),t[1]=Math.min(a[1],n[1]),t},o.max=function(t,a,n){return t[0]=Math.max(a[0],n[0]),t[1]=Math.max(a[1],n[1]),t},o.round=function(t,a){return t[0]=Math.round(a[0]),t[1]=Math.round(a[1]),t},o.scale=function(t,a,n){return t[0]=a[0]*n,t[1]=a[1]*n,t},o.scaleAndAdd=function(t,a,n,r){return t[0]=a[0]+n[0]*r,t[1]=a[1]+n[1]*r,t},o.distance=function(t,a){var n=a[0]-t[0],r=a[1]-t[1];return Math.sqrt(n*n+r*r)},o.dist=o.distance,o.squaredDistance=function(t,a){var n=a[0]-t[0],r=a[1]-t[1];return n*n+r*r},o.sqrDist=o.squaredDistance,o.length=function(t){var a=t[0],n=t[1];return Math.sqrt(a*a+n*n)},o.len=o.length,o.squaredLength=function(t){var a=t[0],n=t[1];return a*a+n*n},o.sqrLen=o.squaredLength,o.negate=function(t,a){return t[0]=-a[0],t[1]=-a[1],t},o.inverse=function(t,a){return t[0]=1/a[0],t[1]=1/a[1],t},o.normalize=function(t,a){var n=a[0],r=a[1],o=n*n+r*r;return o>0&&(o=1/Math.sqrt(o),t[0]=a[0]*o,t[1]=a[1]*o),t},o.dot=function(t,a){return t[0]*a[0]+t[1]*a[1]},o.cross=function(t,a,n){var r=a[0]*n[1]-a[1]*n[0];return t[0]=t[1]=0,t[2]=r,t},o.lerp=function(t,a,n,r){var o=a[0],u=a[1];return t[0]=o+r*(n[0]-o),t[1]=u+r*(n[1]-u),t},o.random=function(t,a){a=a||1;var n=2*r.RANDOM()*Math.PI;return t[0]=Math.cos(n)*a,t[1]=Math.sin(n)*a,t},o.transformMat2=function(t,a,n){var r=a[0],o=a[1];return t[0]=n[0]*r+n[2]*o,t[1]=n[1]*r+n[3]*o,t},o.transformMat2d=function(t,a,n){var r=a[0],o=a[1];return t[0]=n[0]*r+n[2]*o+n[4],t[1]=n[1]*r+n[3]*o+n[5],t},o.transformMat3=function(t,a,n){var r=a[0],o=a[1];return t[0]=n[0]*r+n[3]*o+n[6],t[1]=n[1]*r+n[4]*o+n[7],t},o.transformMat4=function(t,a,n){var r=a[0],o=a[1];return t[0]=n[0]*r+n[4]*o+n[12],t[1]=n[1]*r+n[5]*o+n[13],t},o.forEach=function(){var t=o.create();return function(a,n,r,o,u,l){var e,M;for(n||(n=2),r||(r=0),M=o?Math.min(o*n+r,a.length):a.length,e=r;M>e;e+=n)t[0]=a[e],t[1]=a[e+1],u(t,t,l),a[e]=t[0],a[e+1]=t[1];return a}}(),o.str=function(t){return"vec2("+t[0]+", "+t[1]+")"},o.exactEquals=function(t,a){return t[0]===a[0]&&t[1]===a[1]},o.equals=function(t,a){var n=t[0],o=t[1],u=a[0],l=a[1];return Math.abs(n-u)<=r.EPSILON*Math.max(1,Math.abs(n),Math.abs(u))&&Math.abs(o-l)<=r.EPSILON*Math.max(1,Math.abs(o),Math.abs(l))},t.exports=o}])}); \ No newline at end of file
diff --git a/tests/html/webvr/js/third-party/webvr-polyfill.js b/tests/html/webvr/js/third-party/webvr-polyfill.js
new file mode 100644
index 00000000000..2e4b2ee29ac
--- /dev/null
+++ b/tests/html/webvr/js/third-party/webvr-polyfill.js
@@ -0,0 +1,5939 @@
+(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
+'use strict';
+/* eslint-disable no-unused-vars */
+var hasOwnProperty = Object.prototype.hasOwnProperty;
+var propIsEnumerable = Object.prototype.propertyIsEnumerable;
+
+function toObject(val) {
+ if (val === null || val === undefined) {
+ throw new TypeError('Object.assign cannot be called with null or undefined');
+ }
+
+ return Object(val);
+}
+
+function shouldUseNative() {
+ try {
+ if (!Object.assign) {
+ return false;
+ }
+
+ // Detect buggy property enumeration order in older V8 versions.
+
+ // https://bugs.chromium.org/p/v8/issues/detail?id=4118
+ var test1 = new String('abc'); // eslint-disable-line
+ test1[5] = 'de';
+ if (Object.getOwnPropertyNames(test1)[0] === '5') {
+ return false;
+ }
+
+ // https://bugs.chromium.org/p/v8/issues/detail?id=3056
+ var test2 = {};
+ for (var i = 0; i < 10; i++) {
+ test2['_' + String.fromCharCode(i)] = i;
+ }
+ var order2 = Object.getOwnPropertyNames(test2).map(function (n) {
+ return test2[n];
+ });
+ if (order2.join('') !== '0123456789') {
+ return false;
+ }
+
+ // https://bugs.chromium.org/p/v8/issues/detail?id=3056
+ var test3 = {};
+ 'abcdefghijklmnopqrst'.split('').forEach(function (letter) {
+ test3[letter] = letter;
+ });
+ if (Object.keys(Object.assign({}, test3)).join('') !==
+ 'abcdefghijklmnopqrst') {
+ return false;
+ }
+
+ return true;
+ } catch (e) {
+ // We don't expect any of the above to throw, but better to be safe.
+ return false;
+ }
+}
+
+module.exports = shouldUseNative() ? Object.assign : function (target, source) {
+ var from;
+ var to = toObject(target);
+ var symbols;
+
+ for (var s = 1; s < arguments.length; s++) {
+ from = Object(arguments[s]);
+
+ for (var key in from) {
+ if (hasOwnProperty.call(from, key)) {
+ to[key] = from[key];
+ }
+ }
+
+ if (Object.getOwnPropertySymbols) {
+ symbols = Object.getOwnPropertySymbols(from);
+ for (var i = 0; i < symbols.length; i++) {
+ if (propIsEnumerable.call(from, symbols[i])) {
+ to[symbols[i]] = from[symbols[i]];
+ }
+ }
+ }
+ }
+
+ return to;
+};
+
+},{}],2:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var Util = _dereq_('./util.js');
+var WakeLock = _dereq_('./wakelock.js');
+
+// Start at a higher number to reduce chance of conflict.
+var nextDisplayId = 1000;
+var hasShowDeprecationWarning = false;
+
+var defaultLeftBounds = [0, 0, 0.5, 1];
+var defaultRightBounds = [0.5, 0, 0.5, 1];
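+// Layer bounds are [x, y, width, height] in normalized (0..1) texture space:
+// by default the left eye samples the left half of the source canvas and the
+// right eye samples the right half (see setTextureBounds in the distorter).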
+
+/**
+ * The base class for all VR frame data.
+ */
+
+function VRFrameData() {
+ this.leftProjectionMatrix = new Float32Array(16);
+ this.leftViewMatrix = new Float32Array(16);
+ this.rightProjectionMatrix = new Float32Array(16);
+ this.rightViewMatrix = new Float32Array(16);
+ this.pose = null;
+};
+
+/**
+ * The base class for all VR displays.
+ */
+function VRDisplay() {
+ this.isPolyfilled = true;
+ this.displayId = nextDisplayId++;
+ this.displayName = 'webvr-polyfill displayName';
+
+ this.depthNear = 0.01;
+ this.depthFar = 10000.0;
+
+ this.isConnected = true;
+ this.isPresenting = false;
+ this.capabilities = {
+ hasPosition: false,
+ hasOrientation: false,
+ hasExternalDisplay: false,
+ canPresent: false,
+ maxLayers: 1
+ };
+ this.stageParameters = null;
+
+ // "Private" members.
+ this.waitingForPresent_ = false;
+ this.layer_ = null;
+
+ this.fullscreenElement_ = null;
+ this.fullscreenWrapper_ = null;
+ this.fullscreenElementCachedStyle_ = null;
+
+ this.fullscreenEventTarget_ = null;
+ this.fullscreenChangeHandler_ = null;
+ this.fullscreenErrorHandler_ = null;
+
+ this.wakelock_ = new WakeLock();
+}
+
+VRDisplay.prototype.getFrameData = function(frameData) {
+ // TODO: Technically this should retain its value for the duration of a frame,
+ // but I doubt that's practical to do in JavaScript.
+ return Util.frameDataFromPose(frameData, this.getPose(), this);
+};
+
+VRDisplay.prototype.getPose = function() {
+ // TODO: Technically this should retain its value for the duration of a frame,
+ // but I doubt that's practical to do in JavaScript.
+ return this.getImmediatePose();
+};
+
+VRDisplay.prototype.requestAnimationFrame = function(callback) {
+ return window.requestAnimationFrame(callback);
+};
+
+VRDisplay.prototype.cancelAnimationFrame = function(id) {
+ return window.cancelAnimationFrame(id);
+};
+
+VRDisplay.prototype.wrapForFullscreen = function(element) {
+ // Don't wrap in iOS.
+ if (Util.isIOS()) {
+ return element;
+ }
+ if (!this.fullscreenWrapper_) {
+ this.fullscreenWrapper_ = document.createElement('div');
+ var cssProperties = [
+ 'height: ' + Math.min(screen.height, screen.width) + 'px !important',
+ 'top: 0 !important',
+ 'left: 0 !important',
+ 'right: 0 !important',
+ 'border: 0',
+ 'margin: 0',
+ 'padding: 0',
+ 'z-index: 999999 !important',
+ 'position: fixed',
+ ];
+ this.fullscreenWrapper_.setAttribute('style', cssProperties.join('; ') + ';');
+ this.fullscreenWrapper_.classList.add('webvr-polyfill-fullscreen-wrapper');
+ }
+
+ if (this.fullscreenElement_ == element) {
+ return this.fullscreenWrapper_;
+ }
+
+ // Remove any previously applied wrappers
+ this.removeFullscreenWrapper();
+
+ this.fullscreenElement_ = element;
+ var parent = this.fullscreenElement_.parentElement;
+ parent.insertBefore(this.fullscreenWrapper_, this.fullscreenElement_);
+ parent.removeChild(this.fullscreenElement_);
+ this.fullscreenWrapper_.insertBefore(this.fullscreenElement_, this.fullscreenWrapper_.firstChild);
+ this.fullscreenElementCachedStyle_ = this.fullscreenElement_.getAttribute('style');
+
+ var self = this;
+ function applyFullscreenElementStyle() {
+ if (!self.fullscreenElement_) {
+ return;
+ }
+
+ var cssProperties = [
+ 'position: absolute',
+ 'top: 0',
+ 'left: 0',
+ 'width: ' + Math.max(screen.width, screen.height) + 'px',
+ 'height: ' + Math.min(screen.height, screen.width) + 'px',
+ 'border: 0',
+ 'margin: 0',
+ 'padding: 0',
+ ];
+ self.fullscreenElement_.setAttribute('style', cssProperties.join('; ') + ';');
+ }
+
+ applyFullscreenElementStyle();
+
+ return this.fullscreenWrapper_;
+};
+
+VRDisplay.prototype.removeFullscreenWrapper = function() {
+ if (!this.fullscreenElement_) {
+ return;
+ }
+
+ var element = this.fullscreenElement_;
+ if (this.fullscreenElementCachedStyle_) {
+ element.setAttribute('style', this.fullscreenElementCachedStyle_);
+ } else {
+ element.removeAttribute('style');
+ }
+ this.fullscreenElement_ = null;
+ this.fullscreenElementCachedStyle_ = null;
+
+ var parent = this.fullscreenWrapper_.parentElement;
+ this.fullscreenWrapper_.removeChild(element);
+ parent.insertBefore(element, this.fullscreenWrapper_);
+ parent.removeChild(this.fullscreenWrapper_);
+
+ return element;
+};
+
+VRDisplay.prototype.requestPresent = function(layers) {
+ var wasPresenting = this.isPresenting;
+ var self = this;
+
+ if (!(layers instanceof Array)) {
+ if (!hasShowDeprecationWarning) {
+ console.warn("Using a deprecated form of requestPresent. Should pass in an array of VRLayers.");
+ hasShowDeprecationWarning = true;
+ }
+ layers = [layers];
+ }
+
+ return new Promise(function(resolve, reject) {
+ if (!self.capabilities.canPresent) {
+ reject(new Error('VRDisplay is not capable of presenting.'));
+ return;
+ }
+
+ if (layers.length == 0 || layers.length > self.capabilities.maxLayers) {
+ reject(new Error('Invalid number of layers.'));
+ return;
+ }
+
+ var incomingLayer = layers[0];
+ if (!incomingLayer.source) {
+ /*
+ todo: figure out the correct behavior if the source is not provided.
+ see https://github.com/w3c/webvr/issues/58
+ */
+ resolve();
+ return;
+ }
+
+ var leftBounds = incomingLayer.leftBounds || defaultLeftBounds;
+ var rightBounds = incomingLayer.rightBounds || defaultRightBounds;
+ if (wasPresenting) {
+ // Already presenting, just changing configuration
+ var changed = false;
+ var layer = self.layer_;
+ if (layer.source !== incomingLayer.source) {
+ layer.source = incomingLayer.source;
+ changed = true;
+ }
+
+ for (var i = 0; i < 4; i++) {
+ if (layer.leftBounds[i] !== leftBounds[i]) {
+ layer.leftBounds[i] = leftBounds[i];
+ changed = true;
+ }
+ if (layer.rightBounds[i] !== rightBounds[i]) {
+ layer.rightBounds[i] = rightBounds[i];
+ changed = true;
+ }
+ }
+
+ if (changed) {
+ self.fireVRDisplayPresentChange_();
+ }
+ resolve();
+ return;
+ }
+
+ // Was not already presenting.
+ self.layer_ = {
+ predistorted: incomingLayer.predistorted,
+ source: incomingLayer.source,
+ leftBounds: leftBounds.slice(0),
+ rightBounds: rightBounds.slice(0)
+ };
+
+ self.waitingForPresent_ = false;
+ if (self.layer_ && self.layer_.source) {
+ var fullscreenElement = self.wrapForFullscreen(self.layer_.source);
+
+ function onFullscreenChange() {
+ var actualFullscreenElement = Util.getFullscreenElement();
+
+ self.isPresenting = (fullscreenElement === actualFullscreenElement);
+ if (self.isPresenting) {
+ if (screen.orientation && screen.orientation.lock) {
+ screen.orientation.lock('landscape-primary').catch(function(error){
+ console.error('screen.orientation.lock() failed due to', error.message)
+ });
+ }
+ self.waitingForPresent_ = false;
+ self.beginPresent_();
+ resolve();
+ } else {
+ if (screen.orientation && screen.orientation.unlock) {
+ screen.orientation.unlock();
+ }
+ self.removeFullscreenWrapper();
+ self.wakelock_.release();
+ self.endPresent_();
+ self.removeFullscreenListeners_();
+ }
+ self.fireVRDisplayPresentChange_();
+ }
+ function onFullscreenError() {
+ if (!self.waitingForPresent_) {
+ return;
+ }
+
+ self.removeFullscreenWrapper();
+ self.removeFullscreenListeners_();
+
+ self.wakelock_.release();
+ self.waitingForPresent_ = false;
+ self.isPresenting = false;
+
+ reject(new Error('Unable to present.'));
+ }
+
+ self.addFullscreenListeners_(fullscreenElement,
+ onFullscreenChange, onFullscreenError);
+
+ if (Util.requestFullscreen(fullscreenElement)) {
+ self.wakelock_.request();
+ self.waitingForPresent_ = true;
+ } else if (Util.isIOS()) {
+ // *sigh* Just fake it.
+ self.wakelock_.request();
+ self.isPresenting = true;
+ self.beginPresent_();
+ self.fireVRDisplayPresentChange_();
+ resolve();
+ }
+ }
+
+ if (!self.waitingForPresent_ && !Util.isIOS()) {
+ Util.exitFullscreen();
+ reject(new Error('Unable to present.'));
+ }
+ });
+};
+
+VRDisplay.prototype.exitPresent = function() {
+ var wasPresenting = this.isPresenting;
+ var self = this;
+ this.isPresenting = false;
+ this.layer_ = null;
+ this.wakelock_.release();
+
+ return new Promise(function(resolve, reject) {
+ if (wasPresenting) {
+ if (!Util.exitFullscreen() && Util.isIOS()) {
+ self.endPresent_();
+ self.fireVRDisplayPresentChange_();
+ }
+
+ resolve();
+ } else {
+ reject(new Error('Was not presenting to VRDisplay.'));
+ }
+ });
+};
+
+VRDisplay.prototype.getLayers = function() {
+ if (this.layer_) {
+ return [this.layer_];
+ }
+ return [];
+};
+
+VRDisplay.prototype.fireVRDisplayPresentChange_ = function() {
+ var event = new CustomEvent('vrdisplaypresentchange', {detail: {vrdisplay: this}});
+ window.dispatchEvent(event);
+};
+
+VRDisplay.prototype.addFullscreenListeners_ = function(element, changeHandler, errorHandler) {
+ this.removeFullscreenListeners_();
+
+ this.fullscreenEventTarget_ = element;
+ this.fullscreenChangeHandler_ = changeHandler;
+ this.fullscreenErrorHandler_ = errorHandler;
+
+ if (changeHandler) {
+ element.addEventListener('fullscreenchange', changeHandler, false);
+ element.addEventListener('webkitfullscreenchange', changeHandler, false);
+ document.addEventListener('mozfullscreenchange', changeHandler, false);
+ element.addEventListener('msfullscreenchange', changeHandler, false);
+ }
+
+ if (errorHandler) {
+ element.addEventListener('fullscreenerror', errorHandler, false);
+ element.addEventListener('webkitfullscreenerror', errorHandler, false);
+ document.addEventListener('mozfullscreenerror', errorHandler, false);
+ element.addEventListener('msfullscreenerror', errorHandler, false);
+ }
+};
+
+VRDisplay.prototype.removeFullscreenListeners_ = function() {
+ if (!this.fullscreenEventTarget_)
+ return;
+
+ var element = this.fullscreenEventTarget_;
+
+ if (this.fullscreenChangeHandler_) {
+ var changeHandler = this.fullscreenChangeHandler_;
+ element.removeEventListener('fullscreenchange', changeHandler, false);
+ element.removeEventListener('webkitfullscreenchange', changeHandler, false);
+ document.removeEventListener('mozfullscreenchange', changeHandler, false);
+ element.removeEventListener('msfullscreenchange', changeHandler, false);
+ }
+
+ if (this.fullscreenErrorHandler_) {
+ var errorHandler = this.fullscreenErrorHandler_;
+ element.removeEventListener('fullscreenerror', errorHandler, false);
+ element.removeEventListener('webkitfullscreenerror', errorHandler, false);
+ document.removeEventListener('mozfullscreenerror', errorHandler, false);
+ element.removeEventListener('msfullscreenerror', errorHandler, false);
+ }
+
+ this.fullscreenEventTarget_ = null;
+ this.fullscreenChangeHandler_ = null;
+ this.fullscreenErrorHandler_ = null;
+};
+
+VRDisplay.prototype.beginPresent_ = function() {
+ // Override to add custom behavior when presentation begins.
+};
+
+VRDisplay.prototype.endPresent_ = function() {
+ // Override to add custom behavior when presentation ends.
+};
+
+VRDisplay.prototype.submitFrame = function(pose) {
+ // Override to add custom behavior for frame submission.
+};
+
+VRDisplay.prototype.getEyeParameters = function(whichEye) {
+ // Override to return accurate eye parameters if canPresent is true.
+ return null;
+};
+
+/*
+ * Deprecated classes
+ */
+
+/**
+ * The base class for all VR devices. (Deprecated)
+ */
+function VRDevice() {
+ this.isPolyfilled = true;
+ this.hardwareUnitId = 'webvr-polyfill hardwareUnitId';
+ this.deviceId = 'webvr-polyfill deviceId';
+ this.deviceName = 'webvr-polyfill deviceName';
+}
+
+/**
+ * The base class for all VR HMD devices. (Deprecated)
+ */
+function HMDVRDevice() {
+}
+HMDVRDevice.prototype = new VRDevice();
+
+/**
+ * The base class for all VR position sensor devices. (Deprecated)
+ */
+function PositionSensorVRDevice() {
+}
+PositionSensorVRDevice.prototype = new VRDevice();
+
+module.exports.VRFrameData = VRFrameData;
+module.exports.VRDisplay = VRDisplay;
+module.exports.VRDevice = VRDevice;
+module.exports.HMDVRDevice = HMDVRDevice;
+module.exports.PositionSensorVRDevice = PositionSensorVRDevice;
+
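+/*
+ * Minimal usage sketch (assumes the polyfill entry point exposes
+ * navigator.getVRDisplays() and window.VRFrameData, and that `canvas` is a
+ * WebGL canvas the page renders into):
+ *
+ *   navigator.getVRDisplays().then(function(displays) {
+ *     var display = displays[0];
+ *     var frameData = new VRFrameData();
+ *     display.requestPresent([{source: canvas}]).then(function onPresent() {
+ *       display.requestAnimationFrame(function onFrame() {
+ *         display.requestAnimationFrame(onFrame);
+ *         display.getFrameData(frameData); // per-eye view/projection + pose
+ *         // ... draw the left and right eye views using frameData ...
+ *         display.submitFrame();           // hand the finished frame to the display
+ *       });
+ *     });
+ *   });
+ */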
+},{"./util.js":22,"./wakelock.js":24}],3:[function(_dereq_,module,exports){
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var CardboardUI = _dereq_('./cardboard-ui.js');
+var Util = _dereq_('./util.js');
+var WGLUPreserveGLState = _dereq_('./deps/wglu-preserve-state.js');
+
+var distortionVS = [
+ 'attribute vec2 position;',
+ 'attribute vec3 texCoord;',
+
+ 'varying vec2 vTexCoord;',
+
+ 'uniform vec4 viewportOffsetScale[2];',
+
+ 'void main() {',
+ ' vec4 viewport = viewportOffsetScale[int(texCoord.z)];',
+ ' vTexCoord = (texCoord.xy * viewport.zw) + viewport.xy;',
+ ' gl_Position = vec4( position, 1.0, 1.0 );',
+ '}',
+].join('\n');
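+// texCoord.z carries the per-eye viewport index (0 = left, 1 = right); the
+// actual offsets and scales come from setTextureBounds() via the
+// viewportOffsetScale uniform.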
+
+var distortionFS = [
+ 'precision mediump float;',
+ 'uniform sampler2D diffuse;',
+
+ 'varying vec2 vTexCoord;',
+
+ 'void main() {',
+ ' gl_FragColor = texture2D(diffuse, vTexCoord);',
+ '}',
+].join('\n');
+
+/**
+ * A mesh-based distorter.
+ */
+function CardboardDistorter(gl) {
+ this.gl = gl;
+ this.ctxAttribs = gl.getContextAttributes();
+
+ this.meshWidth = 20;
+ this.meshHeight = 20;
+
+ this.bufferScale = WebVRConfig.BUFFER_SCALE;
+
+ this.bufferWidth = gl.drawingBufferWidth;
+ this.bufferHeight = gl.drawingBufferHeight;
+
+ // Patching support
+ this.realBindFramebuffer = gl.bindFramebuffer;
+ this.realEnable = gl.enable;
+ this.realDisable = gl.disable;
+ this.realColorMask = gl.colorMask;
+ this.realClearColor = gl.clearColor;
+ this.realViewport = gl.viewport;
+
+ if (!Util.isIOS()) {
+ this.realCanvasWidth = Object.getOwnPropertyDescriptor(gl.canvas.__proto__, 'width');
+ this.realCanvasHeight = Object.getOwnPropertyDescriptor(gl.canvas.__proto__, 'height');
+ }
+
+ this.isPatched = false;
+
+ // State tracking
+ this.lastBoundFramebuffer = null;
+ this.cullFace = false;
+ this.depthTest = false;
+ this.blend = false;
+ this.scissorTest = false;
+ this.stencilTest = false;
+ this.viewport = [0, 0, 0, 0];
+ this.colorMask = [true, true, true, true];
+ this.clearColor = [0, 0, 0, 0];
+
+ this.attribs = {
+ position: 0,
+ texCoord: 1
+ };
+ this.program = Util.linkProgram(gl, distortionVS, distortionFS, this.attribs);
+ this.uniforms = Util.getProgramUniforms(gl, this.program);
+
+ this.viewportOffsetScale = new Float32Array(8);
+ this.setTextureBounds();
+
+ this.vertexBuffer = gl.createBuffer();
+ this.indexBuffer = gl.createBuffer();
+ this.indexCount = 0;
+
+ this.renderTarget = gl.createTexture();
+ this.framebuffer = gl.createFramebuffer();
+
+ this.depthStencilBuffer = null;
+ this.depthBuffer = null;
+ this.stencilBuffer = null;
+
+ if (this.ctxAttribs.depth && this.ctxAttribs.stencil) {
+ this.depthStencilBuffer = gl.createRenderbuffer();
+ } else if (this.ctxAttribs.depth) {
+ this.depthBuffer = gl.createRenderbuffer();
+ } else if (this.ctxAttribs.stencil) {
+ this.stencilBuffer = gl.createRenderbuffer();
+ }
+
+ this.patch();
+
+ this.onResize();
+
+ if (!WebVRConfig.CARDBOARD_UI_DISABLED) {
+ this.cardboardUI = new CardboardUI(gl);
+ }
+};
+
+/**
+ * Tears down all the resources created by the distorter and removes any
+ * patches.
+ */
+CardboardDistorter.prototype.destroy = function() {
+ var gl = this.gl;
+
+ this.unpatch();
+
+ gl.deleteProgram(this.program);
+ gl.deleteBuffer(this.vertexBuffer);
+ gl.deleteBuffer(this.indexBuffer);
+ gl.deleteTexture(this.renderTarget);
+ gl.deleteFramebuffer(this.framebuffer);
+ if (this.depthStencilBuffer) {
+ gl.deleteRenderbuffer(this.depthStencilBuffer);
+ }
+ if (this.depthBuffer) {
+ gl.deleteRenderbuffer(this.depthBuffer);
+ }
+ if (this.stencilBuffer) {
+ gl.deleteRenderbuffer(this.stencilBuffer);
+ }
+
+ if (this.cardboardUI) {
+ this.cardboardUI.destroy();
+ }
+};
+
+
+/**
+ * Resizes the backbuffer to match the canvas width and height.
+ */
+CardboardDistorter.prototype.onResize = function() {
+ var gl = this.gl;
+ var self = this;
+
+ var glState = [
+ gl.RENDERBUFFER_BINDING,
+ gl.TEXTURE_BINDING_2D, gl.TEXTURE0
+ ];
+
+ WGLUPreserveGLState(gl, glState, function(gl) {
+ // Bind real backbuffer and clear it once. We don't need to clear it again
+ // after that because we're overwriting the same area every frame.
+ self.realBindFramebuffer.call(gl, gl.FRAMEBUFFER, null);
+
+ // Put things in a good state
+ if (self.scissorTest) { self.realDisable.call(gl, gl.SCISSOR_TEST); }
+ self.realColorMask.call(gl, true, true, true, true);
+ self.realViewport.call(gl, 0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
+ self.realClearColor.call(gl, 0, 0, 0, 1);
+
+ gl.clear(gl.COLOR_BUFFER_BIT);
+
+ // Now bind and resize the fake backbuffer
+ self.realBindFramebuffer.call(gl, gl.FRAMEBUFFER, self.framebuffer);
+
+ gl.bindTexture(gl.TEXTURE_2D, self.renderTarget);
+ gl.texImage2D(gl.TEXTURE_2D, 0, self.ctxAttribs.alpha ? gl.RGBA : gl.RGB,
+ self.bufferWidth, self.bufferHeight, 0,
+ self.ctxAttribs.alpha ? gl.RGBA : gl.RGB, gl.UNSIGNED_BYTE, null);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, self.renderTarget, 0);
+
+ if (self.ctxAttribs.depth && self.ctxAttribs.stencil) {
+ gl.bindRenderbuffer(gl.RENDERBUFFER, self.depthStencilBuffer);
+ gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_STENCIL,
+ self.bufferWidth, self.bufferHeight);
+ gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_STENCIL_ATTACHMENT,
+ gl.RENDERBUFFER, self.depthStencilBuffer);
+ } else if (self.ctxAttribs.depth) {
+ gl.bindRenderbuffer(gl.RENDERBUFFER, self.depthBuffer);
+ gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16,
+ self.bufferWidth, self.bufferHeight);
+ gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT,
+ gl.RENDERBUFFER, self.depthBuffer);
+ } else if (self.ctxAttribs.stencil) {
+ gl.bindRenderbuffer(gl.RENDERBUFFER, self.stencilBuffer);
+ gl.renderbufferStorage(gl.RENDERBUFFER, gl.STENCIL_INDEX8,
+ self.bufferWidth, self.bufferHeight);
+ gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.STENCIL_ATTACHMENT,
+ gl.RENDERBUFFER, self.stencilBuffer);
+ }
+
+ if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
+ console.error('Framebuffer incomplete!');
+ }
+
+ self.realBindFramebuffer.call(gl, gl.FRAMEBUFFER, self.lastBoundFramebuffer);
+
+ if (self.scissorTest) { self.realEnable.call(gl, gl.SCISSOR_TEST); }
+
+ self.realColorMask.apply(gl, self.colorMask);
+ self.realViewport.apply(gl, self.viewport);
+ self.realClearColor.apply(gl, self.clearColor);
+ });
+
+ if (this.cardboardUI) {
+ this.cardboardUI.onResize();
+ }
+};
+
+CardboardDistorter.prototype.patch = function() {
+ if (this.isPatched) {
+ return;
+ }
+
+ var self = this;
+ var canvas = this.gl.canvas;
+ var gl = this.gl;
+
+ if (!Util.isIOS()) {
+ canvas.width = Util.getScreenWidth() * this.bufferScale;
+ canvas.height = Util.getScreenHeight() * this.bufferScale;
+
+ Object.defineProperty(canvas, 'width', {
+ configurable: true,
+ enumerable: true,
+ get: function() {
+ return self.bufferWidth;
+ },
+ set: function(value) {
+ self.bufferWidth = value;
+ self.onResize();
+ }
+ });
+
+ Object.defineProperty(canvas, 'height', {
+ configurable: true,
+ enumerable: true,
+ get: function() {
+ return self.bufferHeight;
+ },
+ set: function(value) {
+ self.bufferHeight = value;
+ self.onResize();
+ }
+ });
+ }
+
+ this.lastBoundFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING);
+
+ if (this.lastBoundFramebuffer == null) {
+ this.lastBoundFramebuffer = this.framebuffer;
+ this.gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
+ }
+
+ this.gl.bindFramebuffer = function(target, framebuffer) {
+ self.lastBoundFramebuffer = framebuffer ? framebuffer : self.framebuffer;
+ // Silently redirect calls that bind the default framebuffer to bind ours instead.
+ self.realBindFramebuffer.call(gl, target, self.lastBoundFramebuffer);
+ };
+
+ this.cullFace = gl.getParameter(gl.CULL_FACE);
+ this.depthTest = gl.getParameter(gl.DEPTH_TEST);
+ this.blend = gl.getParameter(gl.BLEND);
+ this.scissorTest = gl.getParameter(gl.SCISSOR_TEST);
+ this.stencilTest = gl.getParameter(gl.STENCIL_TEST);
+
+ gl.enable = function(pname) {
+ switch (pname) {
+ case gl.CULL_FACE: self.cullFace = true; break;
+ case gl.DEPTH_TEST: self.depthTest = true; break;
+ case gl.BLEND: self.blend = true; break;
+ case gl.SCISSOR_TEST: self.scissorTest = true; break;
+ case gl.STENCIL_TEST: self.stencilTest = true; break;
+ }
+ self.realEnable.call(gl, pname);
+ };
+
+ gl.disable = function(pname) {
+ switch (pname) {
+ case gl.CULL_FACE: self.cullFace = false; break;
+ case gl.DEPTH_TEST: self.depthTest = false; break;
+ case gl.BLEND: self.blend = false; break;
+ case gl.SCISSOR_TEST: self.scissorTest = false; break;
+ case gl.STENCIL_TEST: self.stencilTest = false; break;
+ }
+ self.realDisable.call(gl, pname);
+ };
+
+ this.colorMask = gl.getParameter(gl.COLOR_WRITEMASK);
+ gl.colorMask = function(r, g, b, a) {
+ self.colorMask[0] = r;
+ self.colorMask[1] = g;
+ self.colorMask[2] = b;
+ self.colorMask[3] = a;
+ self.realColorMask.call(gl, r, g, b, a);
+ };
+
+ this.clearColor = gl.getParameter(gl.COLOR_CLEAR_VALUE);
+ gl.clearColor = function(r, g, b, a) {
+ self.clearColor[0] = r;
+ self.clearColor[1] = g;
+ self.clearColor[2] = b;
+ self.clearColor[3] = a;
+ self.realClearColor.call(gl, r, g, b, a);
+ };
+
+ this.viewport = gl.getParameter(gl.VIEWPORT);
+ gl.viewport = function(x, y, w, h) {
+ self.viewport[0] = x;
+ self.viewport[1] = y;
+ self.viewport[2] = w;
+ self.viewport[3] = h;
+ self.realViewport.call(gl, x, y, w, h);
+ };
+
+ this.isPatched = true;
+ Util.safariCssSizeWorkaround(canvas);
+};
+
+CardboardDistorter.prototype.unpatch = function() {
+ if (!this.isPatched) {
+ return;
+ }
+
+ var gl = this.gl;
+ var canvas = this.gl.canvas;
+
+ if (!Util.isIOS()) {
+ Object.defineProperty(canvas, 'width', this.realCanvasWidth);
+ Object.defineProperty(canvas, 'height', this.realCanvasHeight);
+ }
+ canvas.width = this.bufferWidth;
+ canvas.height = this.bufferHeight;
+
+ gl.bindFramebuffer = this.realBindFramebuffer;
+ gl.enable = this.realEnable;
+ gl.disable = this.realDisable;
+ gl.colorMask = this.realColorMask;
+ gl.clearColor = this.realClearColor;
+ gl.viewport = this.realViewport;
+
+ // Check to see if our fake backbuffer is bound and bind the real backbuffer
+ // if that's the case.
+ if (this.lastBoundFramebuffer == this.framebuffer) {
+ gl.bindFramebuffer(gl.FRAMEBUFFER, null);
+ }
+
+ this.isPatched = false;
+
+ setTimeout(function() {
+ Util.safariCssSizeWorkaround(canvas);
+ }, 1);
+};
+
+CardboardDistorter.prototype.setTextureBounds = function(leftBounds, rightBounds) {
+ if (!leftBounds) {
+ leftBounds = [0, 0, 0.5, 1];
+ }
+
+ if (!rightBounds) {
+ rightBounds = [0.5, 0, 0.5, 1];
+ }
+
+ // Left eye
+ this.viewportOffsetScale[0] = leftBounds[0]; // X
+ this.viewportOffsetScale[1] = leftBounds[1]; // Y
+ this.viewportOffsetScale[2] = leftBounds[2]; // Width
+ this.viewportOffsetScale[3] = leftBounds[3]; // Height
+
+ // Right eye
+ this.viewportOffsetScale[4] = rightBounds[0]; // X
+ this.viewportOffsetScale[5] = rightBounds[1]; // Y
+ this.viewportOffsetScale[6] = rightBounds[2]; // Width
+ this.viewportOffsetScale[7] = rightBounds[3]; // Height
+};
+
+/**
+ * Performs distortion pass on the injected backbuffer, rendering it to the real
+ * backbuffer.
+ */
+CardboardDistorter.prototype.submitFrame = function() {
+ var gl = this.gl;
+ var self = this;
+
+ var glState = [];
+
+ if (!WebVRConfig.DIRTY_SUBMIT_FRAME_BINDINGS) {
+ glState.push(
+ gl.CURRENT_PROGRAM,
+ gl.ARRAY_BUFFER_BINDING,
+ gl.ELEMENT_ARRAY_BUFFER_BINDING,
+ gl.TEXTURE_BINDING_2D, gl.TEXTURE0
+ );
+ }
+
+ WGLUPreserveGLState(gl, glState, function(gl) {
+ // Bind the real default framebuffer
+ self.realBindFramebuffer.call(gl, gl.FRAMEBUFFER, null);
+
+ // Make sure the GL state is in a good place
+ if (self.cullFace) { self.realDisable.call(gl, gl.CULL_FACE); }
+ if (self.depthTest) { self.realDisable.call(gl, gl.DEPTH_TEST); }
+ if (self.blend) { self.realDisable.call(gl, gl.BLEND); }
+ if (self.scissorTest) { self.realDisable.call(gl, gl.SCISSOR_TEST); }
+ if (self.stencilTest) { self.realDisable.call(gl, gl.STENCIL_TEST); }
+ self.realColorMask.call(gl, true, true, true, true);
+ self.realViewport.call(gl, 0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
+
+ // If the backbuffer has an alpha channel, clear every frame so the page
+ // doesn't show through.
+ if (self.ctxAttribs.alpha || Util.isIOS()) {
+ self.realClearColor.call(gl, 0, 0, 0, 1);
+ gl.clear(gl.COLOR_BUFFER_BIT);
+ }
+
+ // Bind distortion program and mesh
+ gl.useProgram(self.program);
+
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, self.indexBuffer);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, self.vertexBuffer);
+ gl.enableVertexAttribArray(self.attribs.position);
+ gl.enableVertexAttribArray(self.attribs.texCoord);
+ gl.vertexAttribPointer(self.attribs.position, 2, gl.FLOAT, false, 20, 0);
+ gl.vertexAttribPointer(self.attribs.texCoord, 3, gl.FLOAT, false, 20, 8);
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.uniform1i(self.uniforms.diffuse, 0);
+ gl.bindTexture(gl.TEXTURE_2D, self.renderTarget);
+
+ gl.uniform4fv(self.uniforms.viewportOffsetScale, self.viewportOffsetScale);
+
+ // Draws both eyes
+ gl.drawElements(gl.TRIANGLES, self.indexCount, gl.UNSIGNED_SHORT, 0);
+
+ if (self.cardboardUI) {
+ self.cardboardUI.renderNoState();
+ }
+
+ // Bind the fake default framebuffer again
+ self.realBindFramebuffer.call(self.gl, gl.FRAMEBUFFER, self.framebuffer);
+
+ // If preserveDrawingBuffer == false clear the framebuffer
+ if (!self.ctxAttribs.preserveDrawingBuffer) {
+ self.realClearColor.call(gl, 0, 0, 0, 0);
+ gl.clear(gl.COLOR_BUFFER_BIT);
+ }
+
+ if (!WebVRConfig.DIRTY_SUBMIT_FRAME_BINDINGS) {
+ self.realBindFramebuffer.call(gl, gl.FRAMEBUFFER, self.lastBoundFramebuffer);
+ }
+
+ // Restore state
+ if (self.cullFace) { self.realEnable.call(gl, gl.CULL_FACE); }
+ if (self.depthTest) { self.realEnable.call(gl, gl.DEPTH_TEST); }
+ if (self.blend) { self.realEnable.call(gl, gl.BLEND); }
+ if (self.scissorTest) { self.realEnable.call(gl, gl.SCISSOR_TEST); }
+ if (self.stencilTest) { self.realEnable.call(gl, gl.STENCIL_TEST); }
+
+ self.realColorMask.apply(gl, self.colorMask);
+ self.realViewport.apply(gl, self.viewport);
+ if (self.ctxAttribs.alpha || !self.ctxAttribs.preserveDrawingBuffer) {
+ self.realClearColor.apply(gl, self.clearColor);
+ }
+ });
+
+ // Workaround for the fact that Safari doesn't allow us to patch the canvas
+ // width and height correctly. After each submit frame check to see what the
+ // real backbuffer size has been set to and resize the fake backbuffer size
+ // to match.
+ if (Util.isIOS()) {
+ var canvas = gl.canvas;
+ if (canvas.width != self.bufferWidth || canvas.height != self.bufferHeight) {
+ self.bufferWidth = canvas.width;
+ self.bufferHeight = canvas.height;
+ self.onResize();
+ }
+ }
+};
+
+/**
+ * Call when the deviceInfo has changed. At this point we need
+ * to re-calculate the distortion mesh.
+ */
+CardboardDistorter.prototype.updateDeviceInfo = function(deviceInfo) {
+ var gl = this.gl;
+ var self = this;
+
+ var glState = [gl.ARRAY_BUFFER_BINDING, gl.ELEMENT_ARRAY_BUFFER_BINDING];
+ WGLUPreserveGLState(gl, glState, function(gl) {
+ var vertices = self.computeMeshVertices_(self.meshWidth, self.meshHeight, deviceInfo);
+ gl.bindBuffer(gl.ARRAY_BUFFER, self.vertexBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
+
+ // Indices don't change based on device parameters, so only compute once.
+ if (!self.indexCount) {
+ var indices = self.computeMeshIndices_(self.meshWidth, self.meshHeight);
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, self.indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
+ self.indexCount = indices.length;
+ }
+ });
+};
+
+/**
+ * Build the distortion mesh vertices.
+ * Based on code from the Unity cardboard plugin.
+ */
+CardboardDistorter.prototype.computeMeshVertices_ = function(width, height, deviceInfo) {
+ var vertices = new Float32Array(2 * width * height * 5);
+
+ var lensFrustum = deviceInfo.getLeftEyeVisibleTanAngles();
+ var noLensFrustum = deviceInfo.getLeftEyeNoLensTanAngles();
+ var viewport = deviceInfo.getLeftEyeVisibleScreenRect(noLensFrustum);
+ var vidx = 0;
+ var iidx = 0;
+ for (var e = 0; e < 2; e++) {
+ for (var j = 0; j < height; j++) {
+ for (var i = 0; i < width; i++, vidx++) {
+ var u = i / (width - 1);
+ var v = j / (height - 1);
+
+ // Grid points are regularly spaced in StereoScreen space, and barrel distorted
+ // in the mesh.
+ var s = u;
+ var t = v;
+ var x = Util.lerp(lensFrustum[0], lensFrustum[2], u);
+ var y = Util.lerp(lensFrustum[3], lensFrustum[1], v);
+ var d = Math.sqrt(x * x + y * y);
+ var r = deviceInfo.distortion.distortInverse(d);
+ var p = x * r / d;
+ var q = y * r / d;
+ u = (p - noLensFrustum[0]) / (noLensFrustum[2] - noLensFrustum[0]);
+ v = (q - noLensFrustum[3]) / (noLensFrustum[1] - noLensFrustum[3]);
+
+ // Convert u,v to mesh screen coordinates.
+ var aspect = deviceInfo.device.widthMeters / deviceInfo.device.heightMeters;
+
+ // FIXME: The original Unity plugin multiplied U by the aspect ratio
+ // and didn't multiply either value by 2, but that seems to get it
+ // really close to correct looking for me. I hate this kind of "Don't
+ // know why it works" code though, and wold love a more logical
+ // explanation of what needs to happen here.
+ u = (viewport.x + u * viewport.width - 0.5) * 2.0; //* aspect;
+ v = (viewport.y + v * viewport.height - 0.5) * 2.0;
+
+ vertices[(vidx * 5) + 0] = u; // position.x
+ vertices[(vidx * 5) + 1] = v; // position.y
+ vertices[(vidx * 5) + 2] = s; // texCoord.x
+ vertices[(vidx * 5) + 3] = t; // texCoord.y
+ vertices[(vidx * 5) + 4] = e; // texCoord.z (viewport index)
+ }
+ }
+ var w = lensFrustum[2] - lensFrustum[0];
+ lensFrustum[0] = -(w + lensFrustum[0]);
+ lensFrustum[2] = w - lensFrustum[2];
+ w = noLensFrustum[2] - noLensFrustum[0];
+ noLensFrustum[0] = -(w + noLensFrustum[0]);
+ noLensFrustum[2] = w - noLensFrustum[2];
+ viewport.x = 1 - (viewport.x + viewport.width);
+ }
+ return vertices;
+};
+
+/**
+ * Build the distortion mesh indices.
+ * Based on code from the Unity cardboard plugin.
+ */
+CardboardDistorter.prototype.computeMeshIndices_ = function(width, height) {
+ var indices = new Uint16Array(2 * (width - 1) * (height - 1) * 6);
+ var halfwidth = width / 2;
+ var halfheight = height / 2;
+ var vidx = 0;
+ var iidx = 0;
+ for (var e = 0; e < 2; e++) {
+ for (var j = 0; j < height; j++) {
+ for (var i = 0; i < width; i++, vidx++) {
+ if (i == 0 || j == 0)
+ continue;
+ // Build a quad. Lower right and upper left quadrants have quads with
+ // the triangle diagonal flipped to get the vignette to interpolate
+ // correctly.
+ if ((i <= halfwidth) == (j <= halfheight)) {
+ // Quad diagonal lower left to upper right.
+ indices[iidx++] = vidx;
+ indices[iidx++] = vidx - width - 1;
+ indices[iidx++] = vidx - width;
+ indices[iidx++] = vidx - width - 1;
+ indices[iidx++] = vidx;
+ indices[iidx++] = vidx - 1;
+ } else {
+ // Quad diagonal upper left to lower right.
+ indices[iidx++] = vidx - 1;
+ indices[iidx++] = vidx - width;
+ indices[iidx++] = vidx;
+ indices[iidx++] = vidx - width;
+ indices[iidx++] = vidx - 1;
+ indices[iidx++] = vidx - width - 1;
+ }
+ }
+ }
+ }
+ return indices;
+};
+
+CardboardDistorter.prototype.getOwnPropertyDescriptor_ = function(proto, attrName) {
+ var descriptor = Object.getOwnPropertyDescriptor(proto, attrName);
+ // In some cases (ahem... Safari), the descriptor returns undefined get and
+ // set fields. In this case, we need to create a synthetic property
+ // descriptor. This works around some of the issues in
+ // https://github.com/borismus/webvr-polyfill/issues/46
+ if (descriptor.get === undefined || descriptor.set === undefined) {
+ descriptor.configurable = true;
+ descriptor.enumerable = true;
+ descriptor.get = function() {
+ return this.getAttribute(attrName);
+ };
+ descriptor.set = function(val) {
+ this.setAttribute(attrName, val);
+ };
+ }
+ return descriptor;
+};
+
+module.exports = CardboardDistorter;
+
+},{"./cardboard-ui.js":4,"./deps/wglu-preserve-state.js":6,"./util.js":22}],4:[function(_dereq_,module,exports){
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var Util = _dereq_('./util.js');
+var WGLUPreserveGLState = _dereq_('./deps/wglu-preserve-state.js');
+
+var uiVS = [
+ 'attribute vec2 position;',
+
+ 'uniform mat4 projectionMat;',
+
+ 'void main() {',
+ ' gl_Position = projectionMat * vec4( position, -1.0, 1.0 );',
+ '}',
+].join('\n');
+
+var uiFS = [
+ 'precision mediump float;',
+
+ 'uniform vec4 color;',
+
+ 'void main() {',
+ ' gl_FragColor = color;',
+ '}',
+].join('\n');
+
+var DEG2RAD = Math.PI/180.0;
+
+// The gear has 6 identical sections, each spanning 60 degrees.
+var kAnglePerGearSection = 60;
+
+// Half-angle of the span of the outer rim.
+var kOuterRimEndAngle = 12;
+
+// Angle between the middle of the outer rim and the start of the inner rim.
+var kInnerRimBeginAngle = 20;
+
+// Distance from center to outer rim, normalized so that the entire model
+// fits in a [-1, 1] x [-1, 1] square.
+var kOuterRadius = 1;
+
+// Distance from center to depressed rim, in model units.
+var kMiddleRadius = 0.75;
+
+// Radius of the inner hollow circle, in model units.
+var kInnerRadius = 0.3125;
+
+// Center line thickness in DP.
+var kCenterLineThicknessDp = 4;
+
+// Button width in DP.
+var kButtonWidthDp = 28;
+
+// Factor to scale the touch area that responds to the touch.
+var kTouchSlopFactor = 1.5;
+
+var Angles = [
+ 0, kOuterRimEndAngle, kInnerRimBeginAngle,
+ kAnglePerGearSection - kInnerRimBeginAngle,
+ kAnglePerGearSection - kOuterRimEndAngle
+];
+
+/**
+ * Renders the alignment line and "options" gear. It is assumed that the canvas
+ * this is rendered into covers the entire screen (or close to it).
+ */
+function CardboardUI(gl) {
+ this.gl = gl;
+
+ this.attribs = {
+ position: 0
+ };
+ this.program = Util.linkProgram(gl, uiVS, uiFS, this.attribs);
+ this.uniforms = Util.getProgramUniforms(gl, this.program);
+
+ this.vertexBuffer = gl.createBuffer();
+ this.gearOffset = 0;
+ this.gearVertexCount = 0;
+ this.arrowOffset = 0;
+ this.arrowVertexCount = 0;
+
+ this.projMat = new Float32Array(16);
+
+ this.listener = null;
+
+ this.onResize();
+};
+
+/**
+ * Tears down all the resources created by the UI renderer.
+ */
+CardboardUI.prototype.destroy = function() {
+ var gl = this.gl;
+
+ if (this.listener) {
+ gl.canvas.removeEventListener('click', this.listener, false);
+ }
+
+ gl.deleteProgram(this.program);
+ gl.deleteBuffer(this.vertexBuffer);
+};
+
+/**
+ * Adds a listener to clicks on the gear and back icons
+ */
+CardboardUI.prototype.listen = function(optionsCallback, backCallback) {
+ var canvas = this.gl.canvas;
+ this.listener = function(event) {
+ var midline = canvas.clientWidth / 2;
+ var buttonSize = kButtonWidthDp * kTouchSlopFactor;
+ // Check to see if the user clicked on (or around) the gear icon
+ if (event.clientX > midline - buttonSize &&
+ event.clientX < midline + buttonSize &&
+ event.clientY > canvas.clientHeight - buttonSize) {
+ optionsCallback(event);
+ }
+ // Check to see if the user clicked on (or around) the back icon
+ else if (event.clientX < buttonSize && event.clientY < buttonSize) {
+ backCallback(event);
+ }
+ };
+ canvas.addEventListener('click', this.listener, false);
+};
+
+/**
+ * Builds the UI mesh.
+ */
+CardboardUI.prototype.onResize = function() {
+ var gl = this.gl;
+ var self = this;
+
+ var glState = [
+ gl.ARRAY_BUFFER_BINDING
+ ];
+
+ WGLUPreserveGLState(gl, glState, function(gl) {
+ var vertices = [];
+
+ var midline = gl.drawingBufferWidth / 2;
+
+ // Assumes your canvas width and height are scaled proportionately.
+ // TODO(smus): The following causes buttons to become huge on iOS, but seems
+ // like the right thing to do. For now, added a hack. But really, investigate why.
+ var dps = (gl.drawingBufferWidth / (screen.width * window.devicePixelRatio));
+ if (!Util.isIOS()) {
+ dps *= window.devicePixelRatio;
+ }
+
+ var lineWidth = kCenterLineThicknessDp * dps / 2;
+ var buttonSize = kButtonWidthDp * kTouchSlopFactor * dps;
+ var buttonScale = kButtonWidthDp * dps / 2;
+ var buttonBorder = ((kButtonWidthDp * kTouchSlopFactor) - kButtonWidthDp) * dps;
+
+ // Build centerline
+ vertices.push(midline - lineWidth, buttonSize);
+ vertices.push(midline - lineWidth, gl.drawingBufferHeight);
+ vertices.push(midline + lineWidth, buttonSize);
+ vertices.push(midline + lineWidth, gl.drawingBufferHeight);
+
+ // Build gear
+ self.gearOffset = (vertices.length / 2);
+
+ function addGearSegment(theta, r) {
+ var angle = (90 - theta) * DEG2RAD;
+ var x = Math.cos(angle);
+ var y = Math.sin(angle);
+ vertices.push(kInnerRadius * x * buttonScale + midline, kInnerRadius * y * buttonScale + buttonScale);
+ vertices.push(r * x * buttonScale + midline, r * y * buttonScale + buttonScale);
+ }
+
+ for (var i = 0; i <= 6; i++) {
+ var segmentTheta = i * kAnglePerGearSection;
+
+ addGearSegment(segmentTheta, kOuterRadius);
+ addGearSegment(segmentTheta + kOuterRimEndAngle, kOuterRadius);
+ addGearSegment(segmentTheta + kInnerRimBeginAngle, kMiddleRadius);
+ addGearSegment(segmentTheta + (kAnglePerGearSection - kInnerRimBeginAngle), kMiddleRadius);
+ addGearSegment(segmentTheta + (kAnglePerGearSection - kOuterRimEndAngle), kOuterRadius);
+ }
+
+ self.gearVertexCount = (vertices.length / 2) - self.gearOffset;
+
+ // Build back arrow
+ self.arrowOffset = (vertices.length / 2);
+
+ function addArrowVertex(x, y) {
+ vertices.push(buttonBorder + x, gl.drawingBufferHeight - buttonBorder - y);
+ }
+
+ var angledLineWidth = lineWidth / Math.sin(45 * DEG2RAD);
+
+ addArrowVertex(0, buttonScale);
+ addArrowVertex(buttonScale, 0);
+ addArrowVertex(buttonScale + angledLineWidth, angledLineWidth);
+ addArrowVertex(angledLineWidth, buttonScale + angledLineWidth);
+
+ addArrowVertex(angledLineWidth, buttonScale - angledLineWidth);
+ addArrowVertex(0, buttonScale);
+ addArrowVertex(buttonScale, buttonScale * 2);
+ addArrowVertex(buttonScale + angledLineWidth, (buttonScale * 2) - angledLineWidth);
+
+ addArrowVertex(angledLineWidth, buttonScale - angledLineWidth);
+ addArrowVertex(0, buttonScale);
+
+ addArrowVertex(angledLineWidth, buttonScale - lineWidth);
+ addArrowVertex(kButtonWidthDp * dps, buttonScale - lineWidth);
+ addArrowVertex(angledLineWidth, buttonScale + lineWidth);
+ addArrowVertex(kButtonWidthDp * dps, buttonScale + lineWidth);
+
+ self.arrowVertexCount = (vertices.length / 2) - self.arrowOffset;
+
+ // Buffer data
+ gl.bindBuffer(gl.ARRAY_BUFFER, self.vertexBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
+ });
+};
+
+/**
+ * Renders the UI (center line, gear, and back arrow), preserving and
+ * restoring the relevant GL state around the draw.
+ */
+CardboardUI.prototype.render = function() {
+ var gl = this.gl;
+ var self = this;
+
+ var glState = [
+ gl.CULL_FACE,
+ gl.DEPTH_TEST,
+ gl.BLEND,
+ gl.SCISSOR_TEST,
+ gl.STENCIL_TEST,
+ gl.COLOR_WRITEMASK,
+ gl.VIEWPORT,
+
+ gl.CURRENT_PROGRAM,
+ gl.ARRAY_BUFFER_BINDING
+ ];
+
+ WGLUPreserveGLState(gl, glState, function(gl) {
+ // Make sure the GL state is in a good place
+ gl.disable(gl.CULL_FACE);
+ gl.disable(gl.DEPTH_TEST);
+ gl.disable(gl.BLEND);
+ gl.disable(gl.SCISSOR_TEST);
+ gl.disable(gl.STENCIL_TEST);
+ gl.colorMask(true, true, true, true);
+ gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
+
+ self.renderNoState();
+ });
+};
+
+CardboardUI.prototype.renderNoState = function() {
+ var gl = this.gl;
+
+ // Bind the UI program and vertex buffer
+ gl.useProgram(this.program);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
+ gl.enableVertexAttribArray(this.attribs.position);
+ gl.vertexAttribPointer(this.attribs.position, 2, gl.FLOAT, false, 8, 0);
+
+ gl.uniform4f(this.uniforms.color, 1.0, 1.0, 1.0, 1.0);
+
+ Util.orthoMatrix(this.projMat, 0, gl.drawingBufferWidth, 0, gl.drawingBufferHeight, 0.1, 1024.0);
+ gl.uniformMatrix4fv(this.uniforms.projectionMat, false, this.projMat);
+
+ // Draws the UI elements
+ gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+ gl.drawArrays(gl.TRIANGLE_STRIP, this.gearOffset, this.gearVertexCount);
+ gl.drawArrays(gl.TRIANGLE_STRIP, this.arrowOffset, this.arrowVertexCount);
+};
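+
+// Illustrative usage sketch (not part of the polyfill): CardboardDistorter
+// normally owns the CardboardUI instance, but it can also be driven directly
+// against any WebGL context. The callback bodies here are hypothetical.
+//
+//   var ui = new CardboardUI(gl);
+//   ui.listen(function(e) { /* gear tapped: show options */ },
+//             function(e) { /* back tapped: exit presentation */ });
+//   ui.render();            // preserves GL state around the draw
+//   // ...or ui.renderNoState() when the caller manages GL state itself.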
+
+module.exports = CardboardUI;
+
+},{"./deps/wglu-preserve-state.js":6,"./util.js":22}],5:[function(_dereq_,module,exports){
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var CardboardDistorter = _dereq_('./cardboard-distorter.js');
+var CardboardUI = _dereq_('./cardboard-ui.js');
+var DeviceInfo = _dereq_('./device-info.js');
+var Dpdb = _dereq_('./dpdb/dpdb.js');
+var FusionPoseSensor = _dereq_('./sensor-fusion/fusion-pose-sensor.js');
+var RotateInstructions = _dereq_('./rotate-instructions.js');
+var ViewerSelector = _dereq_('./viewer-selector.js');
+var VRDisplay = _dereq_('./base.js').VRDisplay;
+var Util = _dereq_('./util.js');
+
+var Eye = {
+ LEFT: 'left',
+ RIGHT: 'right'
+};
+
+/**
+ * VRDisplay based on mobile device parameters and DeviceMotion APIs.
+ */
+function CardboardVRDisplay() {
+ this.displayName = 'Cardboard VRDisplay (webvr-polyfill)';
+
+ this.capabilities.hasOrientation = true;
+ this.capabilities.canPresent = true;
+
+ // "Private" members.
+ this.bufferScale_ = WebVRConfig.BUFFER_SCALE;
+ this.poseSensor_ = new FusionPoseSensor();
+ this.distorter_ = null;
+ this.cardboardUI_ = null;
+
+ this.dpdb_ = new Dpdb(true, this.onDeviceParamsUpdated_.bind(this));
+ this.deviceInfo_ = new DeviceInfo(this.dpdb_.getDeviceParams());
+
+ this.viewerSelector_ = new ViewerSelector();
+ this.viewerSelector_.on('change', this.onViewerChanged_.bind(this));
+
+ // Set the correct initial viewer.
+ this.deviceInfo_.setViewer(this.viewerSelector_.getCurrentViewer());
+
+ if (!WebVRConfig.ROTATE_INSTRUCTIONS_DISABLED) {
+ this.rotateInstructions_ = new RotateInstructions();
+ }
+
+ if (Util.isIOS()) {
+ // Listen for resize events to work around this awful Safari bug.
+ window.addEventListener('resize', this.onResize_.bind(this));
+ }
+}
+CardboardVRDisplay.prototype = new VRDisplay();
+
+CardboardVRDisplay.prototype.getImmediatePose = function() {
+ return {
+ position: this.poseSensor_.getPosition(),
+ orientation: this.poseSensor_.getOrientation(),
+ linearVelocity: null,
+ linearAcceleration: null,
+ angularVelocity: null,
+ angularAcceleration: null
+ };
+};
+
+CardboardVRDisplay.prototype.resetPose = function() {
+ this.poseSensor_.resetPose();
+};
+
+CardboardVRDisplay.prototype.getEyeParameters = function(whichEye) {
+ var offset = [this.deviceInfo_.viewer.interLensDistance * 0.5, 0.0, 0.0];
+ var fieldOfView;
+
+ // TODO: FoV can be a little expensive to compute. Cache when device params change.
+ if (whichEye == Eye.LEFT) {
+ offset[0] *= -1.0;
+ fieldOfView = this.deviceInfo_.getFieldOfViewLeftEye();
+ } else if (whichEye == Eye.RIGHT) {
+ fieldOfView = this.deviceInfo_.getFieldOfViewRightEye();
+ } else {
+ console.error('Invalid eye provided: %s', whichEye);
+ return null;
+ }
+
+ return {
+ fieldOfView: fieldOfView,
+ offset: offset,
+ // TODO: Should be able to provide better values than these.
+ renderWidth: this.deviceInfo_.device.width * 0.5 * this.bufferScale_,
+ renderHeight: this.deviceInfo_.device.height * this.bufferScale_,
+ };
+};
+
+CardboardVRDisplay.prototype.onDeviceParamsUpdated_ = function(newParams) {
+ console.log('DPDB reported that device params were updated.');
+ this.deviceInfo_.updateDeviceParams(newParams);
+
+ if (this.distorter_) {
+ this.distorter_.updateDeviceInfo(this.deviceInfo_);
+ }
+};
+
+CardboardVRDisplay.prototype.updateBounds_ = function () {
+ if (this.layer_ && this.distorter_ && (this.layer_.leftBounds || this.layer_.rightBounds)) {
+ this.distorter_.setTextureBounds(this.layer_.leftBounds, this.layer_.rightBounds);
+ }
+};
+
+CardboardVRDisplay.prototype.beginPresent_ = function() {
+ var gl = this.layer_.source.getContext('webgl');
+ if (!gl)
+ gl = this.layer_.source.getContext('experimental-webgl');
+ if (!gl)
+ gl = this.layer_.source.getContext('webgl2');
+
+ if (!gl)
+ return; // Can't do distortion without a WebGL context.
+
+ // Provides a way to opt out of distortion
+ if (this.layer_.predistorted) {
+ if (!WebVRConfig.CARDBOARD_UI_DISABLED) {
+ gl.canvas.width = Util.getScreenWidth() * this.bufferScale_;
+ gl.canvas.height = Util.getScreenHeight() * this.bufferScale_;
+ this.cardboardUI_ = new CardboardUI(gl);
+ }
+ } else {
+ // Create a new distorter for the target context
+ this.distorter_ = new CardboardDistorter(gl);
+ this.distorter_.updateDeviceInfo(this.deviceInfo_);
+ this.cardboardUI_ = this.distorter_.cardboardUI;
+ }
+
+ if (this.cardboardUI_) {
+ this.cardboardUI_.listen(function(e) {
+ // Options clicked.
+ this.viewerSelector_.show(this.layer_.source.parentElement);
+ e.stopPropagation();
+ e.preventDefault();
+ }.bind(this), function(e) {
+ // Back clicked.
+ this.exitPresent();
+ e.stopPropagation();
+ e.preventDefault();
+ }.bind(this));
+ }
+
+ if (this.rotateInstructions_) {
+ if (Util.isLandscapeMode() && Util.isMobile()) {
+ // In landscape mode, temporarily show the "put into Cardboard"
+ // interstitial. Otherwise, do the default thing.
+ this.rotateInstructions_.showTemporarily(3000, this.layer_.source.parentElement);
+ } else {
+ this.rotateInstructions_.update();
+ }
+ }
+
+ // Listen for orientation change events in order to show interstitial.
+ this.orientationHandler = this.onOrientationChange_.bind(this);
+ window.addEventListener('orientationchange', this.orientationHandler);
+
+ // Listen for present display change events in order to update distorter dimensions
+ this.vrdisplaypresentchangeHandler = this.updateBounds_.bind(this);
+ window.addEventListener('vrdisplaypresentchange', this.vrdisplaypresentchangeHandler);
+
+ // Fire this event initially, to give geometry-distortion clients the chance
+ // to do something custom.
+ this.fireVRDisplayDeviceParamsChange_();
+};
+
+CardboardVRDisplay.prototype.endPresent_ = function() {
+ if (this.distorter_) {
+ this.distorter_.destroy();
+ this.distorter_ = null;
+ }
+ if (this.cardboardUI_) {
+ this.cardboardUI_.destroy();
+ this.cardboardUI_ = null;
+ }
+
+ if (this.rotateInstructions_) {
+ this.rotateInstructions_.hide();
+ }
+ this.viewerSelector_.hide();
+
+ window.removeEventListener('orientationchange', this.orientationHandler);
+ window.removeEventListener('vrdisplaypresentchange', this.vrdisplaypresentchangeHandler);
+};
+
+CardboardVRDisplay.prototype.submitFrame = function(pose) {
+ if (this.distorter_) {
+ this.distorter_.submitFrame();
+ } else if (this.cardboardUI_ && this.layer_) {
+ // Hack for predistorted: true.
+ var canvas = this.layer_.source.getContext('webgl').canvas;
+ if (canvas.width != this.lastWidth || canvas.height != this.lastHeight) {
+ this.cardboardUI_.onResize();
+ }
+ this.lastWidth = canvas.width;
+ this.lastHeight = canvas.height;
+
+ // Render the Cardboard UI.
+ this.cardboardUI_.render();
+ }
+};
+
+CardboardVRDisplay.prototype.onOrientationChange_ = function(e) {
+ console.log('onOrientationChange_');
+
+ // Hide the viewer selector.
+ this.viewerSelector_.hide();
+
+ // Update the rotate instructions.
+ if (this.rotateInstructions_) {
+ this.rotateInstructions_.update();
+ }
+
+ this.onResize_();
+};
+
+CardboardVRDisplay.prototype.onResize_ = function(e) {
+ if (this.layer_) {
+ var gl = this.layer_.source.getContext('webgl');
+ // Size the CSS canvas.
+ // Added padding on right and bottom because iPhone 5 will not
+ // hide the URL bar unless content is bigger than the screen.
+ // This will not be visible as long as the container element (e.g. body)
+ // is set to 'overflow: hidden'.
+ var cssProperties = [
+ 'position: absolute',
+ 'top: 0',
+ 'left: 0',
+ 'width: ' + Math.max(screen.width, screen.height) + 'px',
+ 'height: ' + Math.min(screen.height, screen.width) + 'px',
+ 'border: 0',
+ 'margin: 0',
+ 'padding: 0 10px 10px 0',
+ ];
+ gl.canvas.setAttribute('style', cssProperties.join('; ') + ';');
+
+ Util.safariCssSizeWorkaround(gl.canvas);
+ }
+};
+
+CardboardVRDisplay.prototype.onViewerChanged_ = function(viewer) {
+ this.deviceInfo_.setViewer(viewer);
+
+ if (this.distorter_) {
+ // Update the distortion appropriately.
+ this.distorter_.updateDeviceInfo(this.deviceInfo_);
+ }
+
+ // Fire a new event containing viewer and device parameters for clients that
+ // want to implement their own geometry-based distortion.
+ this.fireVRDisplayDeviceParamsChange_();
+};
+
+CardboardVRDisplay.prototype.fireVRDisplayDeviceParamsChange_ = function() {
+ var event = new CustomEvent('vrdisplaydeviceparamschange', {
+ detail: {
+ vrdisplay: this,
+ deviceInfo: this.deviceInfo_,
+ }
+ });
+ window.dispatchEvent(event);
+};
+
+module.exports = CardboardVRDisplay;
+
+},{"./base.js":2,"./cardboard-distorter.js":3,"./cardboard-ui.js":4,"./device-info.js":7,"./dpdb/dpdb.js":11,"./rotate-instructions.js":16,"./sensor-fusion/fusion-pose-sensor.js":18,"./util.js":22,"./viewer-selector.js":23}],6:[function(_dereq_,module,exports){
+/*
+Copyright (c) 2016, Brandon Jones.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+/*
+Caches specified GL state, runs a callback, and restores the cached state when
+done.
+
+Example usage:
+
+var savedState = [
+ gl.ARRAY_BUFFER_BINDING,
+
+ // TEXTURE_BINDING_2D or _CUBE_MAP must always be followed by the texture unit.
+ gl.TEXTURE_BINDING_2D, gl.TEXTURE0,
+
+ gl.CLEAR_COLOR,
+];
+// After this call the array buffer, texture unit 0, active texture, and clear
+// color will be restored. The viewport will remain changed, however, because
+// gl.VIEWPORT was not included in the savedState list.
+WGLUPreserveGLState(gl, savedState, function(gl) {
+ gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
+ gl.bufferData(gl.ARRAY_BUFFER, ....);
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ gl.texImage2D(gl.TEXTURE_2D, ...);
+
+ gl.clearColor(1, 0, 0, 1);
+ gl.clear(gl.COLOR_BUFFER_BIT);
+});
+
+Note that this is not intended to be fast. Managing state in your own code to
+avoid redundant state setting and querying will always be faster. This function
+is most useful for cases where you may not have full control over the WebGL
+calls being made, such as tooling or effect injectors.
+*/
+
+function WGLUPreserveGLState(gl, bindings, callback) {
+ if (!bindings) {
+ callback(gl);
+ return;
+ }
+
+ var boundValues = [];
+
+ var activeTexture = null;
+ for (var i = 0; i < bindings.length; ++i) {
+ var binding = bindings[i];
+ switch (binding) {
+ case gl.TEXTURE_BINDING_2D:
+ case gl.TEXTURE_BINDING_CUBE_MAP:
+ var textureUnit = bindings[++i];
+ if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31) {
+ console.error("TEXTURE_BINDING_2D or TEXTURE_BINDING_CUBE_MAP must be followed by a valid texture unit");
+ boundValues.push(null, null);
+ break;
+ }
+ if (!activeTexture) {
+ activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
+ }
+ gl.activeTexture(textureUnit);
+ boundValues.push(gl.getParameter(binding), null);
+ break;
+ case gl.ACTIVE_TEXTURE:
+ activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
+ boundValues.push(null);
+ break;
+ default:
+ boundValues.push(gl.getParameter(binding));
+ break;
+ }
+ }
+
+ callback(gl);
+
+ for (var i = 0; i < bindings.length; ++i) {
+ var binding = bindings[i];
+ var boundValue = boundValues[i];
+ switch (binding) {
+ case gl.ACTIVE_TEXTURE:
+ break; // Ignore this binding, since we special-case it to happen last.
+ case gl.ARRAY_BUFFER_BINDING:
+ gl.bindBuffer(gl.ARRAY_BUFFER, boundValue);
+ break;
+ case gl.COLOR_CLEAR_VALUE:
+ gl.clearColor(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
+ break;
+ case gl.COLOR_WRITEMASK:
+ gl.colorMask(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
+ break;
+ case gl.CURRENT_PROGRAM:
+ gl.useProgram(boundValue);
+ break;
+ case gl.ELEMENT_ARRAY_BUFFER_BINDING:
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, boundValue);
+ break;
+ case gl.FRAMEBUFFER_BINDING:
+ gl.bindFramebuffer(gl.FRAMEBUFFER, boundValue);
+ break;
+ case gl.RENDERBUFFER_BINDING:
+ gl.bindRenderbuffer(gl.RENDERBUFFER, boundValue);
+ break;
+ case gl.TEXTURE_BINDING_2D:
+ var textureUnit = bindings[++i];
+ if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
+ break;
+ gl.activeTexture(textureUnit);
+ gl.bindTexture(gl.TEXTURE_2D, boundValue);
+ break;
+ case gl.TEXTURE_BINDING_CUBE_MAP:
+ var textureUnit = bindings[++i];
+ if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
+ break;
+ gl.activeTexture(textureUnit);
+ gl.bindTexture(gl.TEXTURE_CUBE_MAP, boundValue);
+ break;
+ case gl.VIEWPORT:
+ gl.viewport(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
+ break;
+ case gl.BLEND:
+ case gl.CULL_FACE:
+ case gl.DEPTH_TEST:
+ case gl.SCISSOR_TEST:
+ case gl.STENCIL_TEST:
+ if (boundValue) {
+ gl.enable(binding);
+ } else {
+ gl.disable(binding);
+ }
+ break;
+ default:
+ console.log("No GL restore behavior for 0x" + binding.toString(16));
+ break;
+ }
+
+ if (activeTexture) {
+ gl.activeTexture(activeTexture);
+ }
+ }
+}
+
+module.exports = WGLUPreserveGLState;
+},{}],7:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var Distortion = _dereq_('./distortion/distortion.js');
+var MathUtil = _dereq_('./math-util.js');
+var Util = _dereq_('./util.js');
+
+function Device(params) {
+ this.width = params.width || Util.getScreenWidth();
+ this.height = params.height || Util.getScreenHeight();
+ this.widthMeters = params.widthMeters;
+ this.heightMeters = params.heightMeters;
+ this.bevelMeters = params.bevelMeters;
+}
+
+
+// Fallback Android device (based on Nexus 5 measurements) for use when
+// we can't recognize an Android device.
+var DEFAULT_ANDROID = new Device({
+ widthMeters: 0.110,
+ heightMeters: 0.062,
+ bevelMeters: 0.004
+});
+
+// Fallback iOS device (based on iPhone6) for use when
+// we can't recognize an iOS device.
+var DEFAULT_IOS = new Device({
+ widthMeters: 0.1038,
+ heightMeters: 0.0584,
+ bevelMeters: 0.004
+});
+
+
+var Viewers = {
+ CardboardV1: new CardboardViewer({
+ id: 'CardboardV1',
+ label: 'Cardboard I/O 2014',
+ fov: 40,
+ interLensDistance: 0.060,
+ baselineLensDistance: 0.035,
+ screenLensDistance: 0.042,
+ distortionCoefficients: [0.441, 0.156],
+ inverseCoefficients: [-0.4410035, 0.42756155, -0.4804439, 0.5460139,
+ -0.58821183, 0.5733938, -0.48303202, 0.33299083, -0.17573841,
+ 0.0651772, -0.01488963, 0.001559834]
+ }),
+ CardboardV2: new CardboardViewer({
+ id: 'CardboardV2',
+ label: 'Cardboard I/O 2015',
+ fov: 60,
+ interLensDistance: 0.064,
+ baselineLensDistance: 0.035,
+ screenLensDistance: 0.039,
+ distortionCoefficients: [0.34, 0.55],
+ inverseCoefficients: [-0.33836704, -0.18162185, 0.862655, -1.2462051,
+ 1.0560602, -0.58208317, 0.21609078, -0.05444823, 0.009177956,
+ -9.904169E-4, 6.183535E-5, -1.6981803E-6]
+ })
+};
+
+
+var DEFAULT_LEFT_CENTER = {x: 0.5, y: 0.5};
+var DEFAULT_RIGHT_CENTER = {x: 0.5, y: 0.5};
+
+/**
+ * Manages information about the device and the viewer.
+ *
+ * deviceParams indicates the parameters of the device to use (generally
+ * obtained from dpdb.getDeviceParams()). Can be null to mean no device
+ * params were found.
+ */
+function DeviceInfo(deviceParams) {
+ this.viewer = Viewers.CardboardV2;
+ this.updateDeviceParams(deviceParams);
+ this.distortion = new Distortion(this.viewer.distortionCoefficients);
+}
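+
+// Illustrative usage sketch (not part of the polyfill): `dpdb` stands in for
+// the Dpdb instance that CardboardVRDisplay wires up above.
+//
+//   var info = new DeviceInfo(dpdb.getDeviceParams());
+//   info.setViewer(DeviceInfo.Viewers.CardboardV1);
+//   var leftFov = info.getFieldOfViewLeftEye();        // degrees per side
+//   var tanAngles = info.getLeftEyeVisibleTanAngles(); // Float32Array(4)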
+
+DeviceInfo.prototype.updateDeviceParams = function(deviceParams) {
+ this.device = this.determineDevice_(deviceParams) || this.device;
+};
+
+DeviceInfo.prototype.getDevice = function() {
+ return this.device;
+};
+
+DeviceInfo.prototype.setViewer = function(viewer) {
+ this.viewer = viewer;
+ this.distortion = new Distortion(this.viewer.distortionCoefficients);
+};
+
+DeviceInfo.prototype.determineDevice_ = function(deviceParams) {
+ if (!deviceParams) {
+ // No parameters, so use a default.
+ if (Util.isIOS()) {
+ console.warn('Using fallback iOS device measurements.');
+ return DEFAULT_IOS;
+ } else {
+ console.warn('Using fallback Android device measurements.');
+ return DEFAULT_ANDROID;
+ }
+ }
+
+ // Compute device screen dimensions based on deviceParams.
+ var METERS_PER_INCH = 0.0254;
+ var metersPerPixelX = METERS_PER_INCH / deviceParams.xdpi;
+ var metersPerPixelY = METERS_PER_INCH / deviceParams.ydpi;
+ var width = Util.getScreenWidth();
+ var height = Util.getScreenHeight();
+ return new Device({
+ widthMeters: metersPerPixelX * width,
+ heightMeters: metersPerPixelY * height,
+ bevelMeters: deviceParams.bevelMm * 0.001,
+ });
+};
+
+/**
+ * Calculates field of view for the left eye.
+ */
+DeviceInfo.prototype.getDistortedFieldOfViewLeftEye = function() {
+ var viewer = this.viewer;
+ var device = this.device;
+ var distortion = this.distortion;
+
+ // Device.height and device.width are for the device in portrait mode, so transpose.
+ var eyeToScreenDistance = viewer.screenLensDistance;
+
+ var outerDist = (device.widthMeters - viewer.interLensDistance) / 2;
+ var innerDist = viewer.interLensDistance / 2;
+ var bottomDist = viewer.baselineLensDistance - device.bevelMeters;
+ var topDist = device.heightMeters - bottomDist;
+
+ var outerAngle = MathUtil.radToDeg * Math.atan(
+ distortion.distort(outerDist / eyeToScreenDistance));
+ var innerAngle = MathUtil.radToDeg * Math.atan(
+ distortion.distort(innerDist / eyeToScreenDistance));
+ var bottomAngle = MathUtil.radToDeg * Math.atan(
+ distortion.distort(bottomDist / eyeToScreenDistance));
+ var topAngle = MathUtil.radToDeg * Math.atan(
+ distortion.distort(topDist / eyeToScreenDistance));
+
+ return {
+ leftDegrees: Math.min(outerAngle, viewer.fov),
+ rightDegrees: Math.min(innerAngle, viewer.fov),
+ downDegrees: Math.min(bottomAngle, viewer.fov),
+ upDegrees: Math.min(topAngle, viewer.fov)
+ };
+};
+
+/**
+ * Calculates the tan-angles from the maximum FOV for the left eye for the
+ * current device and screen parameters.
+ */
+DeviceInfo.prototype.getLeftEyeVisibleTanAngles = function() {
+ var viewer = this.viewer;
+ var device = this.device;
+ var distortion = this.distortion;
+
+ // Tan-angles from the max FOV.
+ var fovLeft = Math.tan(-MathUtil.degToRad * viewer.fov);
+ var fovTop = Math.tan(MathUtil.degToRad * viewer.fov);
+ var fovRight = Math.tan(MathUtil.degToRad * viewer.fov);
+ var fovBottom = Math.tan(-MathUtil.degToRad * viewer.fov);
+ // Viewport size.
+ var halfWidth = device.widthMeters / 4;
+ var halfHeight = device.heightMeters / 2;
+ // Viewport center, measured from left lens position.
+ var verticalLensOffset = (viewer.baselineLensDistance - device.bevelMeters - halfHeight);
+ var centerX = viewer.interLensDistance / 2 - halfWidth;
+ var centerY = -verticalLensOffset;
+ var centerZ = viewer.screenLensDistance;
+ // Tan-angles of the viewport edges, as seen through the lens.
+ var screenLeft = distortion.distort((centerX - halfWidth) / centerZ);
+ var screenTop = distortion.distort((centerY + halfHeight) / centerZ);
+ var screenRight = distortion.distort((centerX + halfWidth) / centerZ);
+ var screenBottom = distortion.distort((centerY - halfHeight) / centerZ);
+ // Compare the two sets of tan-angles and take the value closer to zero on each side.
+ var result = new Float32Array(4);
+ result[0] = Math.max(fovLeft, screenLeft);
+ result[1] = Math.min(fovTop, screenTop);
+ result[2] = Math.min(fovRight, screenRight);
+ result[3] = Math.max(fovBottom, screenBottom);
+ return result;
+};
+
+/**
+ * Calculates the tan-angles from the maximum FOV for the left eye for the
+ * current device and screen parameters, assuming no lenses.
+ */
+DeviceInfo.prototype.getLeftEyeNoLensTanAngles = function() {
+ var viewer = this.viewer;
+ var device = this.device;
+ var distortion = this.distortion;
+
+ var result = new Float32Array(4);
+ // Tan-angles from the max FOV.
+ var fovLeft = distortion.distortInverse(Math.tan(-MathUtil.degToRad * viewer.fov));
+ var fovTop = distortion.distortInverse(Math.tan(MathUtil.degToRad * viewer.fov));
+ var fovRight = distortion.distortInverse(Math.tan(MathUtil.degToRad * viewer.fov));
+ var fovBottom = distortion.distortInverse(Math.tan(-MathUtil.degToRad * viewer.fov));
+ // Viewport size.
+ var halfWidth = device.widthMeters / 4;
+ var halfHeight = device.heightMeters / 2;
+ // Viewport center, measured from left lens position.
+ var verticalLensOffset = (viewer.baselineLensDistance - device.bevelMeters - halfHeight);
+ var centerX = viewer.interLensDistance / 2 - halfWidth;
+ var centerY = -verticalLensOffset;
+ var centerZ = viewer.screenLensDistance;
+ // Tan-angles of the viewport edges, as seen through the lens.
+ var screenLeft = (centerX - halfWidth) / centerZ;
+ var screenTop = (centerY + halfHeight) / centerZ;
+ var screenRight = (centerX + halfWidth) / centerZ;
+ var screenBottom = (centerY - halfHeight) / centerZ;
+ // Compare the two sets of tan-angles and take the value closer to zero on each side.
+ result[0] = Math.max(fovLeft, screenLeft);
+ result[1] = Math.min(fovTop, screenTop);
+ result[2] = Math.min(fovRight, screenRight);
+ result[3] = Math.max(fovBottom, screenBottom);
+ return result;
+};
+
+/**
+ * Calculates the screen rectangle visible from the left eye for the
+ * current device and screen parameters.
+ */
+DeviceInfo.prototype.getLeftEyeVisibleScreenRect = function(undistortedFrustum) {
+ var viewer = this.viewer;
+ var device = this.device;
+
+ var dist = viewer.screenLensDistance;
+ var eyeX = (device.widthMeters - viewer.interLensDistance) / 2;
+ var eyeY = viewer.baselineLensDistance - device.bevelMeters;
+ var left = (undistortedFrustum[0] * dist + eyeX) / device.widthMeters;
+ var top = (undistortedFrustum[1] * dist + eyeY) / device.heightMeters;
+ var right = (undistortedFrustum[2] * dist + eyeX) / device.widthMeters;
+ var bottom = (undistortedFrustum[3] * dist + eyeY) / device.heightMeters;
+ return {
+ x: left,
+ y: bottom,
+ width: right - left,
+ height: top - bottom
+ };
+};
+
+DeviceInfo.prototype.getFieldOfViewLeftEye = function(opt_isUndistorted) {
+ return opt_isUndistorted ? this.getUndistortedFieldOfViewLeftEye() :
+ this.getDistortedFieldOfViewLeftEye();
+};
+
+DeviceInfo.prototype.getFieldOfViewRightEye = function(opt_isUndistorted) {
+ var fov = this.getFieldOfViewLeftEye(opt_isUndistorted);
+ return {
+ leftDegrees: fov.rightDegrees,
+ rightDegrees: fov.leftDegrees,
+ upDegrees: fov.upDegrees,
+ downDegrees: fov.downDegrees
+ };
+};
+
+/**
+ * Calculates undistorted field of view for the left eye.
+ */
+DeviceInfo.prototype.getUndistortedFieldOfViewLeftEye = function() {
+ var p = this.getUndistortedParams_();
+
+ return {
+ leftDegrees: MathUtil.radToDeg * Math.atan(p.outerDist),
+ rightDegrees: MathUtil.radToDeg * Math.atan(p.innerDist),
+ downDegrees: MathUtil.radToDeg * Math.atan(p.bottomDist),
+ upDegrees: MathUtil.radToDeg * Math.atan(p.topDist)
+ };
+};
+
+DeviceInfo.prototype.getUndistortedViewportLeftEye = function() {
+ var p = this.getUndistortedParams_();
+ var viewer = this.viewer;
+ var device = this.device;
+
+ // Distances stored in local variables are in tan-angle units unless otherwise
+ // noted.
+ var eyeToScreenDistance = viewer.screenLensDistance;
+ var screenWidth = device.widthMeters / eyeToScreenDistance;
+ var screenHeight = device.heightMeters / eyeToScreenDistance;
+ var xPxPerTanAngle = device.width / screenWidth;
+ var yPxPerTanAngle = device.height / screenHeight;
+
+ var x = Math.round((p.eyePosX - p.outerDist) * xPxPerTanAngle);
+ var y = Math.round((p.eyePosY - p.bottomDist) * yPxPerTanAngle);
+ return {
+ x: x,
+ y: y,
+ width: Math.round((p.eyePosX + p.innerDist) * xPxPerTanAngle) - x,
+ height: Math.round((p.eyePosY + p.topDist) * yPxPerTanAngle) - y
+ };
+};
+
+DeviceInfo.prototype.getUndistortedParams_ = function() {
+ var viewer = this.viewer;
+ var device = this.device;
+ var distortion = this.distortion;
+
+ // Most of these variables are in tan-angle units.
+ var eyeToScreenDistance = viewer.screenLensDistance;
+ var halfLensDistance = viewer.interLensDistance / 2 / eyeToScreenDistance;
+ var screenWidth = device.widthMeters / eyeToScreenDistance;
+ var screenHeight = device.heightMeters / eyeToScreenDistance;
+
+ var eyePosX = screenWidth / 2 - halfLensDistance;
+ var eyePosY = (viewer.baselineLensDistance - device.bevelMeters) / eyeToScreenDistance;
+
+ var maxFov = viewer.fov;
+ var viewerMax = distortion.distortInverse(Math.tan(MathUtil.degToRad * maxFov));
+ var outerDist = Math.min(eyePosX, viewerMax);
+ var innerDist = Math.min(halfLensDistance, viewerMax);
+ var bottomDist = Math.min(eyePosY, viewerMax);
+ var topDist = Math.min(screenHeight - eyePosY, viewerMax);
+
+ return {
+ outerDist: outerDist,
+ innerDist: innerDist,
+ topDist: topDist,
+ bottomDist: bottomDist,
+ eyePosX: eyePosX,
+ eyePosY: eyePosY
+ };
+};
+
+
+function CardboardViewer(params) {
+ // A machine readable ID.
+ this.id = params.id;
+ // A human readable label.
+ this.label = params.label;
+
+ // Field of view in degrees (per side).
+ this.fov = params.fov;
+
+ // Distance between lens centers in meters.
+ this.interLensDistance = params.interLensDistance;
+ // Distance between viewer baseline and lens center in meters.
+ this.baselineLensDistance = params.baselineLensDistance;
+ // Screen-to-lens distance in meters.
+ this.screenLensDistance = params.screenLensDistance;
+
+ // Distortion coefficients.
+ this.distortionCoefficients = params.distortionCoefficients;
+ // Inverse distortion coefficients.
+ // TODO: Calculate these from distortionCoefficients in the future.
+ this.inverseCoefficients = params.inverseCoefficients;
+}
+
+// Export viewer information.
+DeviceInfo.Viewers = Viewers;
+module.exports = DeviceInfo;
+
+},{"./distortion/distortion.js":9,"./math-util.js":14,"./util.js":22}],8:[function(_dereq_,module,exports){
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var VRDisplay = _dereq_('./base.js').VRDisplay;
+var HMDVRDevice = _dereq_('./base.js').HMDVRDevice;
+var PositionSensorVRDevice = _dereq_('./base.js').PositionSensorVRDevice;
+
+/**
+ * Wraps a VRDisplay and exposes it as an HMDVRDevice
+ */
+function VRDisplayHMDDevice(display) {
+ this.display = display;
+
+ this.hardwareUnitId = display.displayId;
+ this.deviceId = 'webvr-polyfill:HMD:' + display.displayId;
+ this.deviceName = display.displayName + ' (HMD)';
+}
+VRDisplayHMDDevice.prototype = new HMDVRDevice();
+
+VRDisplayHMDDevice.prototype.getEyeParameters = function(whichEye) {
+ var eyeParameters = this.display.getEyeParameters(whichEye);
+
+ return {
+ currentFieldOfView: eyeParameters.fieldOfView,
+ maximumFieldOfView: eyeParameters.fieldOfView,
+ minimumFieldOfView: eyeParameters.fieldOfView,
+ recommendedFieldOfView: eyeParameters.fieldOfView,
+ eyeTranslation: { x: eyeParameters.offset[0], y: eyeParameters.offset[1], z: eyeParameters.offset[2] },
+ renderRect: {
+ x: (whichEye == 'right') ? eyeParameters.renderWidth : 0,
+ y: 0,
+ width: eyeParameters.renderWidth,
+ height: eyeParameters.renderHeight
+ }
+ };
+};
+
+VRDisplayHMDDevice.prototype.setFieldOfView =
+ function(opt_fovLeft, opt_fovRight, opt_zNear, opt_zFar) {
+ // Not supported. getEyeParameters reports that the min, max, and recommended
+ // FoV are all the same, so no adjustment can be made.
+};
+
+// TODO: Need to hook requestFullscreen to see if a wrapped VRDisplay was passed
+// in as an option. If so we should prevent the default fullscreen behavior and
+// call VRDisplay.requestPresent instead.
+
+/**
+ * Wraps a VRDisplay and exposes it as a PositionSensorVRDevice
+ */
+function VRDisplayPositionSensorDevice(display) {
+ this.display = display;
+
+ this.hardwareUnitId = display.displayId;
+ this.deviceId = 'webvr-polyfill:PositionSensor: ' + display.displayId;
+ this.deviceName = display.displayName + ' (PositionSensor)';
+}
+VRDisplayPositionSensorDevice.prototype = new PositionSensorVRDevice();
+
+VRDisplayPositionSensorDevice.prototype.getState = function() {
+ var pose = this.display.getPose();
+ return {
+ position: pose.position ? { x: pose.position[0], y: pose.position[1], z: pose.position[2] } : null,
+ orientation: pose.orientation ? { x: pose.orientation[0], y: pose.orientation[1], z: pose.orientation[2], w: pose.orientation[3] } : null,
+ linearVelocity: null,
+ linearAcceleration: null,
+ angularVelocity: null,
+ angularAcceleration: null
+ };
+};
+
+VRDisplayPositionSensorDevice.prototype.resetState = function() {
+ return this.display.resetPose();
+};
+
+
+module.exports.VRDisplayHMDDevice = VRDisplayHMDDevice;
+module.exports.VRDisplayPositionSensorDevice = VRDisplayPositionSensorDevice;
+
+
+},{"./base.js":2}],9:[function(_dereq_,module,exports){
+/**
+ * TODO(smus): Implement coefficient inversion.
+ */
+function Distortion(coefficients) {
+ this.coefficients = coefficients;
+}
+
+/**
+ * Calculates the inverse distortion for a radius.
+ *
+ * Allows computing the original undistorted radius from a distorted one.
+ * See also approximateInverse() for a faster but potentially
+ * less accurate method.
+ *
+ * @param {Number} radius Distorted radius from the lens center in tan-angle units.
+ * @return {Number} The undistorted radius in tan-angle units.
+ */
+Distortion.prototype.distortInverse = function(radius) {
+ // Secant method.
+ var r0 = 0;
+ var r1 = 1;
+ var dr0 = radius - this.distort(r0);
+ while (Math.abs(r1 - r0) > 0.0001 /** 0.1mm */) {
+ var dr1 = radius - this.distort(r1);
+ var r2 = r1 - dr1 * ((r1 - r0) / (dr1 - dr0));
+ r0 = r1;
+ r1 = r2;
+ dr0 = dr1;
+ }
+ return r1;
+};
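+
+// Sanity-check note (illustrative): distortInverse is the numerical inverse of
+// distort below, so for a Distortion d and a radius r in tan-angle units,
+// d.distortInverse(d.distort(r)) should return r to within the 0.0001
+// tolerance of the secant loop above.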
+
+/**
+ * Distorts a radius by its distortion factor from the center of the lenses.
+ *
+ * @param {Number} radius Radius from the lens center in tan-angle units.
+ * @return {Number} The distorted radius in tan-angle units.
+ */
+Distortion.prototype.distort = function(radius) {
+ var r2 = radius * radius;
+ var ret = 0;
+ for (var i = 0; i < this.coefficients.length; i++) {
+ ret = r2 * (ret + this.coefficients[i]);
+ }
+ return (ret + 1) * radius;
+};
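+
+// Worked example, tracing the loop above (illustrative): with the CardboardV2
+// coefficients [0.34, 0.55] defined earlier, distort(0.5) evaluates to
+// 0.5 * (1 + 0.55 * 0.5^2 + 0.34 * 0.5^4) ≈ 0.579 tan-angle units.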
+
+// Functions below roughly ported from
+// https://github.com/googlesamples/cardboard-unity/blob/master/Cardboard/Scripts/CardboardProfile.cs#L412
+
+// Solves a small linear equation via destructive gaussian
+// elimination and back substitution. This isn't generic numeric
+// code, it's just a quick hack to work with the generally
+// well-behaved symmetric matrices for least-squares fitting.
+// Not intended for reuse.
+//
+// @param a Input positive definite symmetrical matrix. Destroyed
+// during calculation.
+// @param y Input right-hand-side values. Destroyed during calculation.
+// @return Resulting x value vector.
+//
+Distortion.prototype.solveLinear_ = function(a, y) {
+ var n = a.length;
+
+ // Gaussian elimination (no row exchange) to triangular matrix.
+ // The input matrix is a A^T A product which should be a positive
+ // definite symmetrical matrix, and if I remember my linear
+ // algebra right this implies that the pivots will be nonzero and
+ // calculations sufficiently accurate without needing row
+ // exchange.
+ for (var j = 0; j < n - 1; ++j) {
+ for (var k = j + 1; k < n; ++k) {
+ var p = a[j][k] / a[j][j];
+ for (var i = j + 1; i < n; ++i) {
+ a[i][k] -= p * a[i][j];
+ }
+ y[k] -= p * y[j];
+ }
+ }
+ // From this point on, only the matrix elements a[j][i] with i>=j are
+ // valid. The elimination doesn't fill in eliminated 0 values.
+
+ var x = new Array(n);
+
+ // Back substitution.
+ for (var j = n - 1; j >= 0; --j) {
+ var v = y[j];
+ for (var i = j + 1; i < n; ++i) {
+ v -= a[i][j] * x[i];
+ }
+ x[j] = v / a[j][j];
+ }
+
+ return x;
+};
+
+// Solves a least-squares matrix equation. Given the equation A * x = y, calculate the
+// least-squares fit x = inverse(transpose(A) * A) * transpose(A) * y. The way this works
+// is that, while A is typically not a square matrix (and hence not invertible), transpose(A) * A
+// is always square. That is:
+// A * x = y
+// transpose(A) * (A * x) = transpose(A) * y <- multiply both sides by transpose(A)
+// (transpose(A) * A) * x = transpose(A) * y <- associativity
+// x = inverse(transpose(A) * A) * transpose(A) * y <- solve for x
+// As stored here, matA is indexed [coefficient][sample]; the sample count (second index)
+// must match vecY's length, and the coefficient count (first index) determines the length of x.
+Distortion.prototype.solveLeastSquares_ = function(matA, vecY) {
+ var i, j, k, sum;
+ var numCoefficients = matA.length;
+ var numSamples = matA[0].length;
+ if (numSamples != vecY.length) {
+ throw new Error("Matrix / vector dimension mismatch");
+ }
+
+ // Calculate transpose(A) * A
+ var matATA = new Array(numCoefficients);
+ for (k = 0; k < numCoefficients; ++k) {
+ matATA[k] = new Array(numCoefficients);
+ for (j = 0; j < numCoefficients; ++j) {
+ sum = 0;
+ for (i = 0; i < numSamples; ++i) {
+ sum += matA[j][i] * matA[k][i];
+ }
+ matATA[k][j] = sum;
+ }
+ }
+
+ // Calculate transpose(A) * y
+ var vecATY = new Array(numCoefficients);
+ for (j = 0; j < numCoefficients; ++j) {
+ sum = 0;
+ for (i = 0; i < numSamples; ++i) {
+ sum += matA[j][i] * vecY[i];
+ }
+ vecATY[j] = sum;
+ }
+
+ // Now solve (transpose(A) * A) * x = transpose(A) * y.
+ return this.solveLinear_(matATA, vecATY);
+};
+
+/**
+ * Calculates an approximate inverse to the given radial distortion parameters.
+ */
+Distortion.prototype.approximateInverse = function(maxRadius, numSamples) {
+ maxRadius = maxRadius || 1;
+ numSamples = numSamples || 100;
+ var numCoefficients = 6;
+ var i, j;
+
+ // R + K1*R^3 + K2*R^5 = r, with R = rp = distort(r)
+ // Repeating for numSamples:
+ // [ R0^3, R0^5 ] * [ K1 ] = [ r0 - R0 ]
+ // [ R1^3, R1^5 ] [ K2 ] [ r1 - R1 ]
+ // [ R2^3, R2^5 ] [ r2 - R2 ]
+ // [ etc... ] [ etc... ]
+ // That is:
+ // matA * [K1, K2] = y
+ // Solve:
+ // [K1, K2] = inverse(transpose(matA) * matA) * transpose(matA) * y
+ var matA = new Array(numCoefficients);
+ for (j = 0; j < numCoefficients; ++j) {
+ matA[j] = new Array(numSamples);
+ }
+ var vecY = new Array(numSamples);
+
+ for (i = 0; i < numSamples; ++i) {
+ var r = maxRadius * (i + 1) / numSamples;
+ var rp = this.distort(r);
+ var v = rp;
+ for (j = 0; j < numCoefficients; ++j) {
+ v *= rp * rp;
+ matA[j][i] = v;
+ }
+ vecY[i] = r - rp;
+ }
+
+ var inverseCoefficients = this.solveLeastSquares_(matA, vecY);
+
+ return new Distortion(inverseCoefficients);
+};
+
+module.exports = Distortion;
+
+},{}],10:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * DPDB cache.
+ */
+var DPDB_CACHE = {
+ "format": 1,
+ "last_updated": "2016-01-20T00:18:35Z",
+ "devices": [
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "asus/*/Nexus 7/*" },
+ { "ua": "Nexus 7" }
+ ],
+ "dpi": [ 320.8, 323.0 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "asus/*/ASUS_Z00AD/*" },
+ { "ua": "ASUS_Z00AD" }
+ ],
+ "dpi": [ 403.0, 404.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "HTC/*/HTC6435LVW/*" },
+ { "ua": "HTC6435LVW" }
+ ],
+ "dpi": [ 449.7, 443.3 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "HTC/*/HTC One XL/*" },
+ { "ua": "HTC One XL" }
+ ],
+ "dpi": [ 315.3, 314.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "htc/*/Nexus 9/*" },
+ { "ua": "Nexus 9" }
+ ],
+ "dpi": 289.0,
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "HTC/*/HTC One M9/*" },
+ { "ua": "HTC One M9" }
+ ],
+ "dpi": [ 442.5, 443.3 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "HTC/*/HTC One_M8/*" },
+ { "ua": "HTC One_M8" }
+ ],
+ "dpi": [ 449.7, 447.4 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "HTC/*/HTC One/*" },
+ { "ua": "HTC One" }
+ ],
+ "dpi": 472.8,
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Huawei/*/Nexus 6P/*" },
+ { "ua": "Nexus 6P" }
+ ],
+ "dpi": [ 515.1, 518.0 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/Nexus 5X/*" },
+ { "ua": "Nexus 5X" }
+ ],
+ "dpi": [ 422.0, 419.9 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/LGMS345/*" },
+ { "ua": "LGMS345" }
+ ],
+ "dpi": [ 221.7, 219.1 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/LG-D800/*" },
+ { "ua": "LG-D800" }
+ ],
+ "dpi": [ 422.0, 424.1 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/LG-D850/*" },
+ { "ua": "LG-D850" }
+ ],
+ "dpi": [ 537.9, 541.9 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/VS985 4G/*" },
+ { "ua": "VS985 4G" }
+ ],
+ "dpi": [ 537.9, 535.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/Nexus 5/*" },
+ { "ua": "Nexus 5 " }
+ ],
+ "dpi": [ 442.4, 444.8 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/Nexus 4/*" },
+ { "ua": "Nexus 4" }
+ ],
+ "dpi": [ 319.8, 318.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/LG-P769/*" },
+ { "ua": "LG-P769" }
+ ],
+ "dpi": [ 240.6, 247.5 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/LGMS323/*" },
+ { "ua": "LGMS323" }
+ ],
+ "dpi": [ 206.6, 204.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "LGE/*/LGLS996/*" },
+ { "ua": "LGLS996" }
+ ],
+ "dpi": [ 403.4, 401.5 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Micromax/*/4560MMX/*" },
+ { "ua": "4560MMX" }
+ ],
+ "dpi": [ 240.0, 219.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Micromax/*/A250/*" },
+ { "ua": "Micromax A250" }
+ ],
+ "dpi": [ 480.0, 446.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Micromax/*/Micromax AQ4501/*" },
+ { "ua": "Micromax AQ4501" }
+ ],
+ "dpi": 240.0,
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/DROID RAZR/*" },
+ { "ua": "DROID RAZR" }
+ ],
+ "dpi": [ 368.1, 256.7 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT830C/*" },
+ { "ua": "XT830C" }
+ ],
+ "dpi": [ 254.0, 255.9 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1021/*" },
+ { "ua": "XT1021" }
+ ],
+ "dpi": [ 254.0, 256.7 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1023/*" },
+ { "ua": "XT1023" }
+ ],
+ "dpi": [ 254.0, 256.7 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1028/*" },
+ { "ua": "XT1028" }
+ ],
+ "dpi": [ 326.6, 327.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1034/*" },
+ { "ua": "XT1034" }
+ ],
+ "dpi": [ 326.6, 328.4 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1053/*" },
+ { "ua": "XT1053" }
+ ],
+ "dpi": [ 315.3, 316.1 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1562/*" },
+ { "ua": "XT1562" }
+ ],
+ "dpi": [ 403.4, 402.7 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/Nexus 6/*" },
+ { "ua": "Nexus 6 " }
+ ],
+ "dpi": [ 494.3, 489.7 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1063/*" },
+ { "ua": "XT1063" }
+ ],
+ "dpi": [ 295.0, 296.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1064/*" },
+ { "ua": "XT1064" }
+ ],
+ "dpi": [ 295.0, 295.6 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1092/*" },
+ { "ua": "XT1092" }
+ ],
+ "dpi": [ 422.0, 424.1 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "motorola/*/XT1095/*" },
+ { "ua": "XT1095" }
+ ],
+ "dpi": [ 422.0, 423.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "OnePlus/*/A0001/*" },
+ { "ua": "A0001" }
+ ],
+ "dpi": [ 403.4, 401.0 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "OnePlus/*/ONE E1005/*" },
+ { "ua": "ONE E1005" }
+ ],
+ "dpi": [ 442.4, 441.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "OnePlus/*/ONE A2005/*" },
+ { "ua": "ONE A2005" }
+ ],
+ "dpi": [ 391.9, 405.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "OPPO/*/X909/*" },
+ { "ua": "X909" }
+ ],
+ "dpi": [ 442.4, 444.1 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-I9082/*" },
+ { "ua": "GT-I9082" }
+ ],
+ "dpi": [ 184.7, 185.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G360P/*" },
+ { "ua": "SM-G360P" }
+ ],
+ "dpi": [ 196.7, 205.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/Nexus S/*" },
+ { "ua": "Nexus S" }
+ ],
+ "dpi": [ 234.5, 229.8 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-I9300/*" },
+ { "ua": "GT-I9300" }
+ ],
+ "dpi": [ 304.8, 303.9 ],
+ "bw": 5,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-T230NU/*" },
+ { "ua": "SM-T230NU" }
+ ],
+ "dpi": 216.0,
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SGH-T399/*" },
+ { "ua": "SGH-T399" }
+ ],
+ "dpi": [ 217.7, 231.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-N9005/*" },
+ { "ua": "SM-N9005" }
+ ],
+ "dpi": [ 386.4, 387.0 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SAMSUNG-SM-N900A/*" },
+ { "ua": "SAMSUNG-SM-N900A" }
+ ],
+ "dpi": [ 386.4, 387.7 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-I9500/*" },
+ { "ua": "GT-I9500" }
+ ],
+ "dpi": [ 442.5, 443.3 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-I9505/*" },
+ { "ua": "GT-I9505" }
+ ],
+ "dpi": 439.4,
+ "bw": 4,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G900F/*" },
+ { "ua": "SM-G900F" }
+ ],
+ "dpi": [ 415.6, 431.6 ],
+ "bw": 5,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G900M/*" },
+ { "ua": "SM-G900M" }
+ ],
+ "dpi": [ 415.6, 431.6 ],
+ "bw": 5,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G800F/*" },
+ { "ua": "SM-G800F" }
+ ],
+ "dpi": 326.8,
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G906S/*" },
+ { "ua": "SM-G906S" }
+ ],
+ "dpi": [ 562.7, 572.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-I9300/*" },
+ { "ua": "GT-I9300" }
+ ],
+ "dpi": [ 306.7, 304.8 ],
+ "bw": 5,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-T535/*" },
+ { "ua": "SM-T535" }
+ ],
+ "dpi": [ 142.6, 136.4 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-N920C/*" },
+ { "ua": "SM-N920C" }
+ ],
+ "dpi": [ 515.1, 518.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-I9300I/*" },
+ { "ua": "GT-I9300I" }
+ ],
+ "dpi": [ 304.8, 305.8 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-I9195/*" },
+ { "ua": "GT-I9195" }
+ ],
+ "dpi": [ 249.4, 256.7 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SPH-L520/*" },
+ { "ua": "SPH-L520" }
+ ],
+ "dpi": [ 249.4, 255.9 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SAMSUNG-SGH-I717/*" },
+ { "ua": "SAMSUNG-SGH-I717" }
+ ],
+ "dpi": 285.8,
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SPH-D710/*" },
+ { "ua": "SPH-D710" }
+ ],
+ "dpi": [ 217.7, 204.2 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/GT-N7100/*" },
+ { "ua": "GT-N7100" }
+ ],
+ "dpi": 265.1,
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SCH-I605/*" },
+ { "ua": "SCH-I605" }
+ ],
+ "dpi": 265.1,
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/Galaxy Nexus/*" },
+ { "ua": "Galaxy Nexus" }
+ ],
+ "dpi": [ 315.3, 314.2 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-N910H/*" },
+ { "ua": "SM-N910H" }
+ ],
+ "dpi": [ 515.1, 518.0 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-N910C/*" },
+ { "ua": "SM-N910C" }
+ ],
+ "dpi": [ 515.2, 520.2 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G130M/*" },
+ { "ua": "SM-G130M" }
+ ],
+ "dpi": [ 165.9, 164.8 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G928I/*" },
+ { "ua": "SM-G928I" }
+ ],
+ "dpi": [ 515.1, 518.4 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G920F/*" },
+ { "ua": "SM-G920F" }
+ ],
+ "dpi": 580.6,
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G920P/*" },
+ { "ua": "SM-G920P" }
+ ],
+ "dpi": [ 522.5, 577.0 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G925F/*" },
+ { "ua": "SM-G925F" }
+ ],
+ "dpi": 580.6,
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "samsung/*/SM-G925V/*" },
+ { "ua": "SM-G925V" }
+ ],
+ "dpi": [ 522.5, 576.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Sony/*/C6903/*" },
+ { "ua": "C6903" }
+ ],
+ "dpi": [ 442.5, 443.3 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Sony/*/D6653/*" },
+ { "ua": "D6653" }
+ ],
+ "dpi": [ 428.6, 427.6 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Sony/*/E6653/*" },
+ { "ua": "E6653" }
+ ],
+ "dpi": [ 428.6, 425.7 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Sony/*/E6853/*" },
+ { "ua": "E6853" }
+ ],
+ "dpi": [ 403.4, 401.9 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "Sony/*/SGP321/*" },
+ { "ua": "SGP321" }
+ ],
+ "dpi": [ 224.7, 224.1 ],
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "TCT/*/ALCATEL ONE TOUCH Fierce/*" },
+ { "ua": "ALCATEL ONE TOUCH Fierce" }
+ ],
+ "dpi": [ 240.0, 247.5 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "THL/*/thl 5000/*" },
+ { "ua": "thl 5000" }
+ ],
+ "dpi": [ 480.0, 443.3 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "android",
+ "rules": [
+ { "mdmh": "ZTE/*/ZTE Blade L2/*" },
+ { "ua": "ZTE Blade L2" }
+ ],
+ "dpi": 240.0,
+ "bw": 3,
+ "ac": 500
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 640, 960 ] } ],
+ "dpi": [ 325.1, 328.4 ],
+ "bw": 4,
+ "ac": 1000
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 640, 960 ] } ],
+ "dpi": [ 325.1, 328.4 ],
+ "bw": 4,
+ "ac": 1000
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 640, 1136 ] } ],
+ "dpi": [ 317.1, 320.2 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 640, 1136 ] } ],
+ "dpi": [ 317.1, 320.2 ],
+ "bw": 3,
+ "ac": 1000
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 750, 1334 ] } ],
+ "dpi": 326.4,
+ "bw": 4,
+ "ac": 1000
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 750, 1334 ] } ],
+ "dpi": 326.4,
+ "bw": 4,
+ "ac": 1000
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 1242, 2208 ] } ],
+ "dpi": [ 453.6, 458.4 ],
+ "bw": 4,
+ "ac": 1000
+ },
+
+ {
+ "type": "ios",
+ "rules": [ { "res": [ 1242, 2208 ] } ],
+ "dpi": [ 453.6, 458.4 ],
+ "bw": 4,
+ "ac": 1000
+ }
+]};
+
+module.exports = DPDB_CACHE;
+
+},{}],11:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Offline cache of the DPDB, to be used until we load the online one (and
+// as a fallback in case we can't load the online one).
+var DPDB_CACHE = _dereq_('./dpdb-cache.js');
+var Util = _dereq_('../util.js');
+
+// Online DPDB URL.
+var ONLINE_DPDB_URL = 'https://storage.googleapis.com/cardboard-dpdb/dpdb.json';
+
+/**
+ * Calculates device parameters based on the DPDB (Device Parameter Database).
+ * Initially, uses the cached DPDB values.
+ *
+ * If fetchOnline == true, then this object tries to fetch the online version
+ * of the DPDB and updates the device info if a better match is found.
+ * Calls the onDeviceParamsUpdated callback when there is an update to the
+ * device information.
+ */
+function Dpdb(fetchOnline, onDeviceParamsUpdated) {
+ // Start with the offline DPDB cache while we are loading the real one.
+ this.dpdb = DPDB_CACHE;
+
+ // Calculate device params based on the offline version of the DPDB.
+ this.recalculateDeviceParams_();
+
+ // XHR to fetch online DPDB file, if requested.
+ if (fetchOnline) {
+ // Set the callback.
+ this.onDeviceParamsUpdated = onDeviceParamsUpdated;
+
+ console.log('Fetching DPDB...');
+ var xhr = new XMLHttpRequest();
+ var obj = this;
+ xhr.open('GET', ONLINE_DPDB_URL, true);
+ xhr.addEventListener('load', function() {
+ obj.loading = false;
+ if (xhr.status >= 200 && xhr.status <= 299) {
+ // Success.
+ console.log('Successfully loaded online DPDB.');
+ obj.dpdb = JSON.parse(xhr.response);
+ obj.recalculateDeviceParams_();
+ } else {
+ // Error loading the DPDB.
+ console.error('Error loading online DPDB!');
+ }
+ });
+ xhr.send();
+ }
+}
+
+// Returns the current device parameters.
+Dpdb.prototype.getDeviceParams = function() {
+ return this.deviceParams;
+};
+
+// Recalculates this device's parameters based on the DPDB.
+Dpdb.prototype.recalculateDeviceParams_ = function() {
+ console.log('Recalculating device params.');
+ var newDeviceParams = this.calcDeviceParams_();
+ console.log('New device parameters:');
+ console.log(newDeviceParams);
+ if (newDeviceParams) {
+ this.deviceParams = newDeviceParams;
+ // Invoke callback, if it is set.
+ if (this.onDeviceParamsUpdated) {
+ this.onDeviceParamsUpdated(this.deviceParams);
+ }
+ } else {
+ console.error('Failed to recalculate device parameters.');
+ }
+};
+
+// Returns a DeviceParams object that represents the best guess as to this
+// device's parameters. Can return null if the device does not match any
+// known devices.
+Dpdb.prototype.calcDeviceParams_ = function() {
+ var db = this.dpdb; // shorthand
+ if (!db) {
+ console.error('DPDB not available.');
+ return null;
+ }
+ if (db.format != 1) {
+ console.error('DPDB has unexpected format version.');
+ return null;
+ }
+ if (!db.devices || !db.devices.length) {
+ console.error('DPDB does not have a devices section.');
+ return null;
+ }
+
+ // Get the actual user agent and screen dimensions in pixels.
+ var userAgent = navigator.userAgent || navigator.vendor || window.opera;
+ var width = Util.getScreenWidth();
+ var height = Util.getScreenHeight();
+ console.log('User agent: ' + userAgent);
+ console.log('Pixel width: ' + width);
+ console.log('Pixel height: ' + height);
+
+ if (!db.devices) {
+ console.error('DPDB has no devices section.');
+ return null;
+ }
+
+ for (var i = 0; i < db.devices.length; i++) {
+ var device = db.devices[i];
+ if (!device.rules) {
+ console.warn('Device[' + i + '] has no rules section.');
+ continue;
+ }
+
+ if (device.type != 'ios' && device.type != 'android') {
+ console.warn('Device[' + i + '] has invalid type.');
+ continue;
+ }
+
+ // See if this device is of the appropriate type.
+ if (Util.isIOS() != (device.type == 'ios')) continue;
+
+ // See if this device matches any of the rules:
+ var matched = false;
+ for (var j = 0; j < device.rules.length; j++) {
+ var rule = device.rules[j];
+ if (this.matchRule_(rule, userAgent, width, height)) {
+ console.log('Rule matched:');
+ console.log(rule);
+ matched = true;
+ break;
+ }
+ }
+ if (!matched) continue;
+
+ // device.dpi might be an array of [ xdpi, ydpi ] or just a scalar.
+ var xdpi = device.dpi[0] || device.dpi;
+ var ydpi = device.dpi[1] || device.dpi;
+
+ return new DeviceParams({ xdpi: xdpi, ydpi: ydpi, bevelMm: device.bw });
+ }
+
+ console.warn('No DPDB device match.');
+ return null;
+};
+
+Dpdb.prototype.matchRule_ = function(rule, ua, screenWidth, screenHeight) {
+ // We can only match 'ua' and 'res' rules, not other types like 'mdmh'
+ // (which are meant for native platforms).
+ if (!rule.ua && !rule.res) return false;
+
+ // If our user agent string doesn't contain the indicated user agent string,
+ // the match fails.
+ if (rule.ua && ua.indexOf(rule.ua) < 0) return false;
+
+ // If the rule specifies screen dimensions that don't correspond to ours,
+ // the match fails.
+ if (rule.res) {
+ if (!rule.res[0] || !rule.res[1]) return false;
+ var resX = rule.res[0];
+ var resY = rule.res[1];
+ // Compare min and max so as to make the order not matter, i.e., it should
+ // be true that 640x480 == 480x640.
+ if (Math.min(screenWidth, screenHeight) != Math.min(resX, resY) ||
+ (Math.max(screenWidth, screenHeight) != Math.max(resX, resY))) {
+ return false;
+ }
+ }
+
+ return true;
+};
+
+function DeviceParams(params) {
+ this.xdpi = params.xdpi;
+ this.ydpi = params.ydpi;
+ this.bevelMm = params.bevelMm;
+}
+
+module.exports = Dpdb;
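+
+// --- Illustrative usage sketch (not part of the upstream polyfill) ---
+// A minimal illustration of how a consumer might drive Dpdb, assuming a browser
+// context. getDeviceParams() returns the offline-cache estimate immediately
+// (or undefined if no entry matched); the callback fires once the online DPDB
+// has been fetched and the parameters recalculated:
+//
+//   var dpdb = new Dpdb(true, function(deviceParams) {
+//     console.log('xdpi=' + deviceParams.xdpi +
+//                 ', ydpi=' + deviceParams.ydpi +
+//                 ', bevelMm=' + deviceParams.bevelMm);
+//   });
+//   var params = dpdb.getDeviceParams();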
+},{"../util.js":22,"./dpdb-cache.js":10}],12:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+function Emitter() {
+ this.callbacks = {};
+}
+
+Emitter.prototype.emit = function(eventName) {
+ var callbacks = this.callbacks[eventName];
+ if (!callbacks) {
+ //console.log('No valid callback specified.');
+ return;
+ }
+ var args = [].slice.call(arguments);
+ // Remove the first argument (the event name) before invoking listeners.
+ args.shift();
+ for (var i = 0; i < callbacks.length; i++) {
+ callbacks[i].apply(this, args);
+ }
+};
+
+Emitter.prototype.on = function(eventName, callback) {
+ if (eventName in this.callbacks) {
+ this.callbacks[eventName].push(callback);
+ } else {
+ this.callbacks[eventName] = [callback];
+ }
+};
+
+module.exports = Emitter;
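+
+// --- Illustrative usage sketch (not part of the upstream polyfill) ---
+// emit() forwards every argument after the event name to each listener:
+//
+//   var emitter = new Emitter();
+//   emitter.on('resize', function(width, height) { console.log(width, height); });
+//   emitter.emit('resize', 320, 240); // each 'resize' listener receives (320, 240)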
+
+},{}],13:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var Util = _dereq_('./util.js');
+var WebVRPolyfill = _dereq_('./webvr-polyfill.js').WebVRPolyfill;
+var InstallWebVRSpecShim = _dereq_('./webvr-polyfill.js').InstallWebVRSpecShim;
+
+// Initialize a WebVRConfig just in case.
+window.WebVRConfig = Util.extend({
+ // Forces availability of VR mode, even for non-mobile devices.
+ FORCE_ENABLE_VR: false,
+
+ // Complementary filter coefficient. 0 for accelerometer, 1 for gyro.
+ K_FILTER: 0.98,
+
+ // How far into the future to predict during fast motion (in seconds).
+ PREDICTION_TIME_S: 0.040,
+
+ // Flag to disable touch panner. In case you have your own touch controls.
+ TOUCH_PANNER_DISABLED: false,
+
+ // Flag to disable the UI in VR Mode.
+ CARDBOARD_UI_DISABLED: false, // Default: false
+
+ // Flag to disable the instructions to rotate your device.
+ ROTATE_INSTRUCTIONS_DISABLED: false, // Default: false.
+
+ // Enable yaw panning only, disabling roll and pitch. This can be useful
+ // for panoramas with nothing interesting above or below.
+ YAW_ONLY: false,
+
+ // To disable keyboard and mouse controls, if you want to use your own
+ // implementation.
+ MOUSE_KEYBOARD_CONTROLS_DISABLED: false,
+
+ // Prevent the polyfill from initializing immediately. Requires the app
+ // to call InitializeWebVRPolyfill() before it can be used.
+ DEFER_INITIALIZATION: false,
+
+ // Enable the deprecated version of the API (navigator.getVRDevices).
+ ENABLE_DEPRECATED_API: false,
+
+ // Scales the recommended buffer size reported by WebVR, which can improve
+ // performance.
+ // UPDATE(2016-05-03): Setting this to 0.5 by default since 1.0 does not
+ // perform well on many mobile devices.
+ BUFFER_SCALE: 0.5,
+
+ // Allow VRDisplay.submitFrame to change gl bindings, which is more
+ // efficient if the application code will re-bind its resources on the
+ // next frame anyway. This has been seen to cause rendering glitches with
+ // THREE.js.
+ // Dirty bindings include: gl.FRAMEBUFFER_BINDING, gl.CURRENT_PROGRAM,
+ // gl.ARRAY_BUFFER_BINDING, gl.ELEMENT_ARRAY_BUFFER_BINDING,
+ // and gl.TEXTURE_BINDING_2D for texture unit 0.
+ DIRTY_SUBMIT_FRAME_BINDINGS: false
+}, window.WebVRConfig);
+
+if (!window.WebVRConfig.DEFER_INITIALIZATION) {
+ new WebVRPolyfill();
+} else {
+ window.InitializeWebVRPolyfill = function() {
+ new WebVRPolyfill();
+ }
+ // Call this if you want to use the shim without the rest of the polyfill.
+ // InitializeWebVRPolyfill() will install the shim automatically when needed,
+ // so this should rarely be used.
+ window.InitializeSpecShim = function() {
+ InstallWebVRSpecShim();
+ }
+}
+
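+// --- Illustrative usage sketch (not part of the upstream polyfill) ---
+// A page can override any of the defaults above by defining window.WebVRConfig
+// before this bundle is evaluated, e.g. in an earlier <script> tag (the values
+// shown are illustrative):
+//
+//   window.WebVRConfig = {
+//     BUFFER_SCALE: 1.0,           // full-resolution buffers, at a performance cost
+//     DEFER_INITIALIZATION: true   // postpone polyfill start-up
+//   };
+//
+// With DEFER_INITIALIZATION set, the page must later call
+// window.InitializeWebVRPolyfill() itself, as defined above.
+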
+},{"./util.js":22,"./webvr-polyfill.js":25}],14:[function(_dereq_,module,exports){
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var MathUtil = window.MathUtil || {};
+
+MathUtil.degToRad = Math.PI / 180;
+MathUtil.radToDeg = 180 / Math.PI;
+
+// Some minimal math functionality borrowed from THREE.Math and stripped down
+// for the purposes of this library.
+
+
+MathUtil.Vector2 = function ( x, y ) {
+ this.x = x || 0;
+ this.y = y || 0;
+};
+
+MathUtil.Vector2.prototype = {
+ constructor: MathUtil.Vector2,
+
+ set: function ( x, y ) {
+ this.x = x;
+ this.y = y;
+
+ return this;
+ },
+
+ copy: function ( v ) {
+ this.x = v.x;
+ this.y = v.y;
+
+ return this;
+ },
+
+ subVectors: function ( a, b ) {
+ this.x = a.x - b.x;
+ this.y = a.y - b.y;
+
+ return this;
+ },
+};
+
+MathUtil.Vector3 = function ( x, y, z ) {
+ this.x = x || 0;
+ this.y = y || 0;
+ this.z = z || 0;
+};
+
+MathUtil.Vector3.prototype = {
+ constructor: MathUtil.Vector3,
+
+ set: function ( x, y, z ) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+
+ return this;
+ },
+
+ copy: function ( v ) {
+ this.x = v.x;
+ this.y = v.y;
+ this.z = v.z;
+
+ return this;
+ },
+
+ length: function () {
+ return Math.sqrt( this.x * this.x + this.y * this.y + this.z * this.z );
+ },
+
+ normalize: function () {
+ var scalar = this.length();
+
+ if ( scalar !== 0 ) {
+ var invScalar = 1 / scalar;
+
+ this.multiplyScalar(invScalar);
+ } else {
+ this.x = 0;
+ this.y = 0;
+ this.z = 0;
+ }
+
+ return this;
+ },
+
+ multiplyScalar: function ( scalar ) {
+ this.x *= scalar;
+ this.y *= scalar;
+ this.z *= scalar;
+ },
+
+ applyQuaternion: function ( q ) {
+ var x = this.x;
+ var y = this.y;
+ var z = this.z;
+
+ var qx = q.x;
+ var qy = q.y;
+ var qz = q.z;
+ var qw = q.w;
+
+ // calculate quat * vector
+ var ix = qw * x + qy * z - qz * y;
+ var iy = qw * y + qz * x - qx * z;
+ var iz = qw * z + qx * y - qy * x;
+ var iw = - qx * x - qy * y - qz * z;
+
+ // calculate result * inverse quat
+ this.x = ix * qw + iw * - qx + iy * - qz - iz * - qy;
+ this.y = iy * qw + iw * - qy + iz * - qx - ix * - qz;
+ this.z = iz * qw + iw * - qz + ix * - qy - iy * - qx;
+
+ return this;
+ },
+
+ dot: function ( v ) {
+ return this.x * v.x + this.y * v.y + this.z * v.z;
+ },
+
+ crossVectors: function ( a, b ) {
+ var ax = a.x, ay = a.y, az = a.z;
+ var bx = b.x, by = b.y, bz = b.z;
+
+ this.x = ay * bz - az * by;
+ this.y = az * bx - ax * bz;
+ this.z = ax * by - ay * bx;
+
+ return this;
+ },
+};
+
+MathUtil.Quaternion = function ( x, y, z, w ) {
+ this.x = x || 0;
+ this.y = y || 0;
+ this.z = z || 0;
+ this.w = ( w !== undefined ) ? w : 1;
+};
+
+MathUtil.Quaternion.prototype = {
+ constructor: MathUtil.Quaternion,
+
+ set: function ( x, y, z, w ) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ this.w = w;
+
+ return this;
+ },
+
+ copy: function ( quaternion ) {
+ this.x = quaternion.x;
+ this.y = quaternion.y;
+ this.z = quaternion.z;
+ this.w = quaternion.w;
+
+ return this;
+ },
+
+ setFromEulerXYZ: function( x, y, z ) {
+ var c1 = Math.cos( x / 2 );
+ var c2 = Math.cos( y / 2 );
+ var c3 = Math.cos( z / 2 );
+ var s1 = Math.sin( x / 2 );
+ var s2 = Math.sin( y / 2 );
+ var s3 = Math.sin( z / 2 );
+
+ this.x = s1 * c2 * c3 + c1 * s2 * s3;
+ this.y = c1 * s2 * c3 - s1 * c2 * s3;
+ this.z = c1 * c2 * s3 + s1 * s2 * c3;
+ this.w = c1 * c2 * c3 - s1 * s2 * s3;
+
+ return this;
+ },
+
+ setFromEulerYXZ: function( x, y, z ) {
+ var c1 = Math.cos( x / 2 );
+ var c2 = Math.cos( y / 2 );
+ var c3 = Math.cos( z / 2 );
+ var s1 = Math.sin( x / 2 );
+ var s2 = Math.sin( y / 2 );
+ var s3 = Math.sin( z / 2 );
+
+ this.x = s1 * c2 * c3 + c1 * s2 * s3;
+ this.y = c1 * s2 * c3 - s1 * c2 * s3;
+ this.z = c1 * c2 * s3 - s1 * s2 * c3;
+ this.w = c1 * c2 * c3 + s1 * s2 * s3;
+
+ return this;
+ },
+
+ setFromAxisAngle: function ( axis, angle ) {
+ // http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToQuaternion/index.htm
+ // assumes axis is normalized
+
+ var halfAngle = angle / 2, s = Math.sin( halfAngle );
+
+ this.x = axis.x * s;
+ this.y = axis.y * s;
+ this.z = axis.z * s;
+ this.w = Math.cos( halfAngle );
+
+ return this;
+ },
+
+ multiply: function ( q ) {
+ return this.multiplyQuaternions( this, q );
+ },
+
+ multiplyQuaternions: function ( a, b ) {
+ // from http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/code/index.htm
+
+ var qax = a.x, qay = a.y, qaz = a.z, qaw = a.w;
+ var qbx = b.x, qby = b.y, qbz = b.z, qbw = b.w;
+
+ this.x = qax * qbw + qaw * qbx + qay * qbz - qaz * qby;
+ this.y = qay * qbw + qaw * qby + qaz * qbx - qax * qbz;
+ this.z = qaz * qbw + qaw * qbz + qax * qby - qay * qbx;
+ this.w = qaw * qbw - qax * qbx - qay * qby - qaz * qbz;
+
+ return this;
+ },
+
+ inverse: function () {
+ this.x *= -1;
+ this.y *= -1;
+ this.z *= -1;
+
+ this.normalize();
+
+ return this;
+ },
+
+ normalize: function () {
+ var l = Math.sqrt( this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w );
+
+ if ( l === 0 ) {
+ this.x = 0;
+ this.y = 0;
+ this.z = 0;
+ this.w = 1;
+ } else {
+ l = 1 / l;
+
+ this.x = this.x * l;
+ this.y = this.y * l;
+ this.z = this.z * l;
+ this.w = this.w * l;
+ }
+
+ return this;
+ },
+
+ slerp: function ( qb, t ) {
+ if ( t === 0 ) return this;
+ if ( t === 1 ) return this.copy( qb );
+
+ var x = this.x, y = this.y, z = this.z, w = this.w;
+
+ // http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/slerp/
+
+ var cosHalfTheta = w * qb.w + x * qb.x + y * qb.y + z * qb.z;
+
+ if ( cosHalfTheta < 0 ) {
+ this.w = - qb.w;
+ this.x = - qb.x;
+ this.y = - qb.y;
+ this.z = - qb.z;
+
+ cosHalfTheta = - cosHalfTheta;
+ } else {
+ this.copy( qb );
+ }
+
+ if ( cosHalfTheta >= 1.0 ) {
+ this.w = w;
+ this.x = x;
+ this.y = y;
+ this.z = z;
+
+ return this;
+ }
+
+ var halfTheta = Math.acos( cosHalfTheta );
+ var sinHalfTheta = Math.sqrt( 1.0 - cosHalfTheta * cosHalfTheta );
+
+ if ( Math.abs( sinHalfTheta ) < 0.001 ) {
+ this.w = 0.5 * ( w + this.w );
+ this.x = 0.5 * ( x + this.x );
+ this.y = 0.5 * ( y + this.y );
+ this.z = 0.5 * ( z + this.z );
+
+ return this;
+ }
+
+ var ratioA = Math.sin( ( 1 - t ) * halfTheta ) / sinHalfTheta,
+ ratioB = Math.sin( t * halfTheta ) / sinHalfTheta;
+
+ this.w = ( w * ratioA + this.w * ratioB );
+ this.x = ( x * ratioA + this.x * ratioB );
+ this.y = ( y * ratioA + this.y * ratioB );
+ this.z = ( z * ratioA + this.z * ratioB );
+
+ return this;
+ },
+
+ setFromUnitVectors: function () {
+ // http://lolengine.net/blog/2014/02/24/quaternion-from-two-vectors-final
+ // assumes direction vectors vFrom and vTo are normalized
+
+ var v1, r;
+ var EPS = 0.000001;
+
+ return function ( vFrom, vTo ) {
+ if ( v1 === undefined ) v1 = new MathUtil.Vector3();
+
+ r = vFrom.dot( vTo ) + 1;
+
+ if ( r < EPS ) {
+ r = 0;
+
+ if ( Math.abs( vFrom.x ) > Math.abs( vFrom.z ) ) {
+ v1.set( - vFrom.y, vFrom.x, 0 );
+ } else {
+ v1.set( 0, - vFrom.z, vFrom.y );
+ }
+ } else {
+ v1.crossVectors( vFrom, vTo );
+ }
+
+ this.x = v1.x;
+ this.y = v1.y;
+ this.z = v1.z;
+ this.w = r;
+
+ this.normalize();
+
+ return this;
+ }
+ }(),
+};
+
+module.exports = MathUtil;
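+
+// --- Illustrative worked example (not part of the upstream polyfill) ---
+// Rotating the +X axis 90 degrees about +Y with the helpers above yields -Z,
+// up to floating-point error:
+//
+//   var q = new MathUtil.Quaternion().setFromAxisAngle(
+//       new MathUtil.Vector3(0, 1, 0), 90 * MathUtil.degToRad);
+//   var v = new MathUtil.Vector3(1, 0, 0).applyQuaternion(q);
+//   // v is now approximately (0, 0, -1)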
+
+},{}],15:[function(_dereq_,module,exports){
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var VRDisplay = _dereq_('./base.js').VRDisplay;
+var MathUtil = _dereq_('./math-util.js');
+var Util = _dereq_('./util.js');
+
+// How much to rotate per key stroke.
+var KEY_SPEED = 0.15;
+var KEY_ANIMATION_DURATION = 80;
+
+// How much to rotate for mouse events.
+var MOUSE_SPEED_X = 0.5;
+var MOUSE_SPEED_Y = 0.3;
+
+/**
+ * VRDisplay based on mouse and keyboard input. Designed for desktops/laptops
+ * where orientation events aren't supported. Cannot present.
+ */
+function MouseKeyboardVRDisplay() {
+ this.displayName = 'Mouse and Keyboard VRDisplay (webvr-polyfill)';
+
+ this.capabilities.hasOrientation = true;
+
+ // Attach to mouse and keyboard events.
+ window.addEventListener('keydown', this.onKeyDown_.bind(this));
+ window.addEventListener('mousemove', this.onMouseMove_.bind(this));
+ window.addEventListener('mousedown', this.onMouseDown_.bind(this));
+ window.addEventListener('mouseup', this.onMouseUp_.bind(this));
+
+ // "Private" members.
+ this.phi_ = 0;
+ this.theta_ = 0;
+
+ // Variables for keyboard-based rotation animation.
+ this.targetAngle_ = null;
+ this.angleAnimation_ = null;
+
+ // State variables for calculations.
+ this.orientation_ = new MathUtil.Quaternion();
+
+ // Variables for mouse-based rotation.
+ this.rotateStart_ = new MathUtil.Vector2();
+ this.rotateEnd_ = new MathUtil.Vector2();
+ this.rotateDelta_ = new MathUtil.Vector2();
+ this.isDragging_ = false;
+
+ this.orientationOut_ = new Float32Array(4);
+}
+MouseKeyboardVRDisplay.prototype = new VRDisplay();
+
+MouseKeyboardVRDisplay.prototype.getImmediatePose = function() {
+ this.orientation_.setFromEulerYXZ(this.phi_, this.theta_, 0);
+
+ this.orientationOut_[0] = this.orientation_.x;
+ this.orientationOut_[1] = this.orientation_.y;
+ this.orientationOut_[2] = this.orientation_.z;
+ this.orientationOut_[3] = this.orientation_.w;
+
+ return {
+ position: null,
+ orientation: this.orientationOut_,
+ linearVelocity: null,
+ linearAcceleration: null,
+ angularVelocity: null,
+ angularAcceleration: null
+ };
+};
+
+MouseKeyboardVRDisplay.prototype.onKeyDown_ = function(e) {
+ // Track the arrow keys.
+ if (e.keyCode == 38) { // Up key.
+ this.animatePhi_(this.phi_ + KEY_SPEED);
+ } else if (e.keyCode == 39) { // Right key.
+ this.animateTheta_(this.theta_ - KEY_SPEED);
+ } else if (e.keyCode == 40) { // Down key.
+ this.animatePhi_(this.phi_ - KEY_SPEED);
+ } else if (e.keyCode == 37) { // Left key.
+ this.animateTheta_(this.theta_ + KEY_SPEED);
+ }
+};
+
+MouseKeyboardVRDisplay.prototype.animateTheta_ = function(targetAngle) {
+ this.animateKeyTransitions_('theta_', targetAngle);
+};
+
+MouseKeyboardVRDisplay.prototype.animatePhi_ = function(targetAngle) {
+ // Prevent looking too far up or down.
+ targetAngle = Util.clamp(targetAngle, -Math.PI/2, Math.PI/2);
+ this.animateKeyTransitions_('phi_', targetAngle);
+};
+
+/**
+ * Start an animation to transition an angle from one value to another.
+ */
+MouseKeyboardVRDisplay.prototype.animateKeyTransitions_ = function(angleName, targetAngle) {
+ // If an animation is currently running, cancel it.
+ if (this.angleAnimation_) {
+ cancelAnimationFrame(this.angleAnimation_);
+ }
+ var startAngle = this[angleName];
+ var startTime = new Date();
+ // Drive the animation with requestAnimationFrame.
+ this.angleAnimation_ = requestAnimationFrame(function animate() {
+ // Once we've finished the animation, we're done.
+ var elapsed = new Date() - startTime;
+ if (elapsed >= KEY_ANIMATION_DURATION) {
+ this[angleName] = targetAngle;
+ cancelAnimationFrame(this.angleAnimation_);
+ return;
+ }
+ // loop with requestAnimationFrame
+ this.angleAnimation_ = requestAnimationFrame(animate.bind(this));
+ // Linearly interpolate the angle some amount.
+ var percent = elapsed / KEY_ANIMATION_DURATION;
+ this[angleName] = startAngle + (targetAngle - startAngle) * percent;
+ }.bind(this));
+};
+
+MouseKeyboardVRDisplay.prototype.onMouseDown_ = function(e) {
+ this.rotateStart_.set(e.clientX, e.clientY);
+ this.isDragging_ = true;
+};
+
+// Very similar to https://gist.github.com/mrflix/8351020
+MouseKeyboardVRDisplay.prototype.onMouseMove_ = function(e) {
+ if (!this.isDragging_ && !this.isPointerLocked_()) {
+ return;
+ }
+ // Support pointer lock API.
+ if (this.isPointerLocked_()) {
+ var movementX = e.movementX || e.mozMovementX || 0;
+ var movementY = e.movementY || e.mozMovementY || 0;
+ this.rotateEnd_.set(this.rotateStart_.x - movementX, this.rotateStart_.y - movementY);
+ } else {
+ this.rotateEnd_.set(e.clientX, e.clientY);
+ }
+ // Calculate how much we moved in mouse space.
+ this.rotateDelta_.subVectors(this.rotateEnd_, this.rotateStart_);
+ this.rotateStart_.copy(this.rotateEnd_);
+
+ // Keep track of the cumulative euler angles.
+ this.phi_ += 2 * Math.PI * this.rotateDelta_.y / screen.height * MOUSE_SPEED_Y;
+ this.theta_ += 2 * Math.PI * this.rotateDelta_.x / screen.width * MOUSE_SPEED_X;
+
+ // Prevent looking too far up or down.
+ this.phi_ = Util.clamp(this.phi_, -Math.PI/2, Math.PI/2);
+};
+
+MouseKeyboardVRDisplay.prototype.onMouseUp_ = function(e) {
+ this.isDragging_ = false;
+};
+
+MouseKeyboardVRDisplay.prototype.isPointerLocked_ = function() {
+ var el = document.pointerLockElement || document.mozPointerLockElement ||
+ document.webkitPointerLockElement;
+ // pointerLockElement is null (not undefined) when the pointer is not locked.
+ return el !== undefined && el !== null;
+};
+
+MouseKeyboardVRDisplay.prototype.resetPose = function() {
+ this.phi_ = 0;
+ this.theta_ = 0;
+};
+
+module.exports = MouseKeyboardVRDisplay;
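+
+// --- Illustrative usage sketch (not part of the upstream polyfill) ---
+// Assuming a browser context (the constructor attaches window listeners), a
+// pose can be sampled directly; orientation is a Float32Array quaternion
+// [x, y, z, w] and position is always null for this display:
+//
+//   var display = new MouseKeyboardVRDisplay();
+//   var pose = display.getImmediatePose();
+//   console.log(pose.orientation, pose.position);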
+
+},{"./base.js":2,"./math-util.js":14,"./util.js":22}],16:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var Util = _dereq_('./util.js');
+
+function RotateInstructions() {
+ this.loadIcon_();
+
+ var overlay = document.createElement('div');
+ var s = overlay.style;
+ s.position = 'fixed';
+ s.top = 0;
+ s.right = 0;
+ s.bottom = 0;
+ s.left = 0;
+ s.backgroundColor = 'gray';
+ s.fontFamily = 'sans-serif';
+ // Force this to be above the fullscreen canvas, which is at zIndex: 999999.
+ s.zIndex = 1000000;
+
+ var img = document.createElement('img');
+ img.src = this.icon;
+ var s = img.style;
+ s.marginLeft = '25%';
+ s.marginTop = '25%';
+ s.width = '50%';
+ overlay.appendChild(img);
+
+ var text = document.createElement('div');
+ var s = text.style;
+ s.textAlign = 'center';
+ s.fontSize = '16px';
+ s.lineHeight = '24px';
+ s.margin = '24px 25%';
+ s.width = '50%';
+ text.innerHTML = 'Place your phone into your Cardboard viewer.';
+ overlay.appendChild(text);
+
+ var snackbar = document.createElement('div');
+ var s = snackbar.style;
+ s.backgroundColor = '#CFD8DC';
+ s.position = 'fixed';
+ s.bottom = 0;
+ s.width = '100%';
+ s.height = '48px';
+ s.padding = '14px 24px';
+ s.boxSizing = 'border-box';
+ s.color = '#656A6B';
+ overlay.appendChild(snackbar);
+
+ var snackbarText = document.createElement('div');
+ snackbarText.style.float = 'left';
+ snackbarText.innerHTML = 'No Cardboard viewer?';
+
+ var snackbarButton = document.createElement('a');
+ snackbarButton.href = 'https://www.google.com/get/cardboard/get-cardboard/';
+ snackbarButton.innerHTML = 'get one';
+ snackbarButton.target = '_blank';
+ var s = snackbarButton.style;
+ s.float = 'right';
+ s.fontWeight = 600;
+ s.textTransform = 'uppercase';
+ s.borderLeft = '1px solid gray';
+ s.paddingLeft = '24px';
+ s.textDecoration = 'none';
+ s.color = '#656A6B';
+
+ snackbar.appendChild(snackbarText);
+ snackbar.appendChild(snackbarButton);
+
+ this.overlay = overlay;
+ this.text = text;
+
+ this.hide();
+}
+
+RotateInstructions.prototype.show = function(parent) {
+ if (!parent && !this.overlay.parentElement) {
+ document.body.appendChild(this.overlay);
+ } else if (parent) {
+ if (this.overlay.parentElement && this.overlay.parentElement != parent)
+ this.overlay.parentElement.removeChild(this.overlay);
+
+ parent.appendChild(this.overlay);
+ }
+
+ this.overlay.style.display = 'block';
+
+ var img = this.overlay.querySelector('img');
+ var s = img.style;
+
+ if (Util.isLandscapeMode()) {
+ s.width = '20%';
+ s.marginLeft = '40%';
+ s.marginTop = '3%';
+ } else {
+ s.width = '50%';
+ s.marginLeft = '25%';
+ s.marginTop = '25%';
+ }
+};
+
+RotateInstructions.prototype.hide = function() {
+ this.overlay.style.display = 'none';
+};
+
+RotateInstructions.prototype.showTemporarily = function(ms, parent) {
+ this.show(parent);
+ this.timer = setTimeout(this.hide.bind(this), ms);
+};
+
+RotateInstructions.prototype.disableShowTemporarily = function() {
+ clearTimeout(this.timer);
+};
+
+RotateInstructions.prototype.update = function() {
+ this.disableShowTemporarily();
+ // In portrait VR mode, tell the user to rotate to landscape. Otherwise, hide
+ // the instructions.
+ if (!Util.isLandscapeMode() && Util.isMobile()) {
+ this.show();
+ } else {
+ this.hide();
+ }
+};
+
+RotateInstructions.prototype.loadIcon_ = function() {
+ // Encoded asset_src/rotate-instructions.svg
+ this.icon = Util.base64('image/svg+xml', '<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg width="198px" height="240px" viewBox="0 0 198 240" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns">
    <!-- Generator: Sketch 3.3.3 (12081) - http://www.bohemiancoding.com/sketch -->
    <title>transition</title>
    <desc>Created with Sketch.</desc>
    <defs></defs>
    <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage">
        <g id="transition" sketch:type="MSArtboardGroup">
            <g id="Imported-Layers-Copy-4-+-Imported-Layers-Copy-+-Imported-Layers-Copy-2-Copy" sketch:type="MSLayerGroup">
                <g id="Imported-Layers-Copy-4" transform="translate(0.000000, 107.000000)" sketch:type="MSShapeGroup">
                    <path d="M149.625,2.527 C149.625,2.527 155.805,6.096 156.362,6.418 L156.362,7.304 C156.362,7.481 156.375,7.664 156.4,7.853 C156.41,7.934 156.42,8.015 156.427,8.095 C156.567,9.51 157.401,11.093 158.532,12.094 L164.252,17.156 L164.333,17.066 C164.333,17.066 168.715,14.536 169.568,14.042 C171.025,14.883 195.538,29.035 195.538,29.035 L195.538,83.036 C195.538,83.807 195.152,84.253 194.59,84.253 C194.357,84.253 194.095,84.177 193.818,84.017 L169.851,70.179 L169.837,70.203 L142.515,85.978 L141.665,84.655 C136.934,83.126 131.917,81.915 126.714,81.045 C126.709,81.06 126.707,81.069 126.707,81.069 L121.64,98.03 L113.749,102.586 L113.712,102.523 L113.712,130.113 C113.712,130.885 113.326,131.33 112.764,131.33 C112.532,131.33 112.269,131.254 111.992,131.094 L69.519,106.572 C68.569,106.023 67.799,104.695 67.799,103.605 L67.799,102.57 L67.778,102.617 C67.27,102.393 66.648,102.249 65.962,102.218 C65.875,102.214 65.788,102.212 65.701,102.212 C65.606,102.212 65.511,102.215 65.416,102.219 C65.195,102.229 64.974,102.235 64.754,102.235 C64.331,102.235 63.911,102.216 63.498,102.178 C61.843,102.025 60.298,101.578 59.094,100.882 L12.518,73.992 L12.523,74.004 L2.245,55.254 C1.244,53.427 2.004,51.038 3.943,49.918 L59.954,17.573 C60.626,17.185 61.35,17.001 62.053,17.001 C63.379,17.001 64.625,17.66 65.28,18.854 L65.285,18.851 L65.512,19.264 L65.506,19.268 C65.909,20.003 66.405,20.68 66.983,21.286 L67.26,21.556 C69.174,23.406 71.728,24.357 74.373,24.357 C76.322,24.357 78.321,23.84 80.148,22.785 C80.161,22.785 87.467,18.566 87.467,18.566 C88.139,18.178 88.863,17.994 89.566,17.994 C90.892,17.994 92.138,18.652 92.792,19.847 L96.042,25.775 L96.064,25.757 L102.849,29.674 L102.744,29.492 L149.625,2.527 M149.625,0.892 C149.343,0.892 149.062,0.965 148.81,1.11 L102.641,27.666 L97.231,24.542 L94.226,19.061 C93.313,17.394 91.527,16.359 89.566,16.358 C88.555,16.358 87.546,16.632 86.649,17.15 C83.878,18.75 79.687,21.169 79.374,21.345 C79.359,21.353 79.345,21.361 79.33,21.369 C77.798,22.254 76.084,22.722 74.373,22.722 C72.081,22.722 69.959,21.89 68.397,20.38 L68.145,20.135 C67.706,19.672 67.323,19.156 67.006,18.601 C66.988,18.559 66.968,18.519 66.946,18.479 L66.719,18.065 C66.69,18.012 66.658,17.96 66.624,17.911 C65.686,16.337 63.951,15.366 62.053,15.366 C61.042,15.366 60.033,15.64 59.136,16.158 L3.125,48.502 C0.426,50.061 -0.613,53.442 0.811,56.04 L11.089,74.79 C11.266,75.113 11.537,75.353 11.85,75.494 L58.276,102.298 C59.679,103.108 61.433,103.63 63.348,103.806 C63.812,103.848 64.285,103.87 64.754,103.87 C65,103.87 65.249,103.864 65.494,103.852 C65.563,103.849 65.632,103.847 65.701,103.847 C65.764,103.847 65.828,103.849 65.89,103.852 C65.986,103.856 66.08,103.863 66.173,103.874 C66.282,105.467 67.332,107.197 68.702,107.988 L111.174,132.51 C111.698,132.812 112.232,132.965 112.764,132.965 C114.261,132.965 115.347,131.765 115.347,130.113 L115.347,103.551 L122.458,99.446 C122.819,99.237 123.087,98.898 123.207,98.498 L127.865,82.905 C132.279,83.702 136.557,84.753 140.607,86.033 L141.14,86.862 C141.451,87.346 141.977,87.613 142.516,87.613 C142.794,87.613 143.076,87.542 143.333,87.393 L169.865,72.076 L193,85.433 C193.523,85.735 194.058,85.888 194.59,85.888 C196.087,85.888 197.173,84.689 197.173,83.036 L197.173,29.035 C197.173,28.451 196.861,27.911 196.355,27.619 C196.355,27.619 171.843,13.467 170.385,12.626 C170.132,12.48 169.85,12.407 169.568,12.407 C169.285,12.407 169.002,12.481 168.749,12.627 C168.143,12.978 165.756,14.357 164.424,15.125 L159.615,10.87 C158.796,10.145 158.154,8.937 158.054,7.934 C158.045,7.837 
158.034,7.739 158.021,7.64 C158.005,7.523 157.998,7.41 157.998,7.304 L157.998,6.418 C157.998,5.834 157.686,5.295 157.181,5.002 C156.624,4.68 150.442,1.111 150.442,1.111 C150.189,0.965 149.907,0.892 149.625,0.892" id="Fill-1" fill="#455A64"></path>
                    <path d="M96.027,25.636 L142.603,52.527 C143.807,53.222 144.582,54.114 144.845,55.068 L144.835,55.075 L63.461,102.057 L63.46,102.057 C61.806,101.905 60.261,101.457 59.057,100.762 L12.481,73.871 L96.027,25.636" id="Fill-2" fill="#FAFAFA"></path>
                    <path d="M63.461,102.174 C63.453,102.174 63.446,102.174 63.439,102.172 C61.746,102.016 60.211,101.563 58.998,100.863 L12.422,73.973 C12.386,73.952 12.364,73.914 12.364,73.871 C12.364,73.83 12.386,73.791 12.422,73.77 L95.968,25.535 C96.004,25.514 96.049,25.514 96.085,25.535 L142.661,52.426 C143.888,53.134 144.682,54.038 144.957,55.037 C144.97,55.083 144.953,55.133 144.915,55.161 C144.911,55.165 144.898,55.174 144.894,55.177 L63.519,102.158 C63.501,102.169 63.481,102.174 63.461,102.174 L63.461,102.174 Z M12.714,73.871 L59.115,100.661 C60.293,101.341 61.786,101.782 63.435,101.937 L144.707,55.015 C144.428,54.108 143.682,53.285 142.544,52.628 L96.027,25.771 L12.714,73.871 L12.714,73.871 Z" id="Fill-3" fill="#607D8B"></path>
                    <path d="M148.327,58.471 C148.145,58.48 147.962,58.48 147.781,58.472 C145.887,58.389 144.479,57.434 144.636,56.34 C144.689,55.967 144.664,55.597 144.564,55.235 L63.461,102.057 C64.089,102.115 64.733,102.13 65.379,102.099 C65.561,102.09 65.743,102.09 65.925,102.098 C67.819,102.181 69.227,103.136 69.07,104.23 L148.327,58.471" id="Fill-4" fill="#FFFFFF"></path>
                    <path d="M69.07,104.347 C69.048,104.347 69.025,104.34 69.005,104.327 C68.968,104.301 68.948,104.257 68.955,104.213 C69,103.896 68.898,103.576 68.658,103.288 C68.153,102.678 67.103,102.266 65.92,102.214 C65.742,102.206 65.563,102.207 65.385,102.215 C64.742,102.246 64.087,102.232 63.45,102.174 C63.399,102.169 63.358,102.132 63.347,102.082 C63.336,102.033 63.358,101.981 63.402,101.956 L144.506,55.134 C144.537,55.116 144.575,55.113 144.609,55.127 C144.642,55.141 144.668,55.17 144.677,55.204 C144.781,55.585 144.806,55.972 144.751,56.357 C144.706,56.673 144.808,56.994 145.047,57.282 C145.553,57.892 146.602,58.303 147.786,58.355 C147.964,58.363 148.143,58.363 148.321,58.354 C148.377,58.352 148.424,58.387 148.439,58.438 C148.454,58.49 148.432,58.545 148.385,58.572 L69.129,104.331 C69.111,104.342 69.09,104.347 69.07,104.347 L69.07,104.347 Z M65.665,101.975 C65.754,101.975 65.842,101.977 65.93,101.981 C67.196,102.037 68.283,102.469 68.838,103.139 C69.065,103.413 69.188,103.714 69.198,104.021 L147.883,58.592 C147.847,58.592 147.811,58.591 147.776,58.589 C146.509,58.533 145.422,58.1 144.867,57.431 C144.585,57.091 144.465,56.707 144.52,56.324 C144.563,56.021 144.552,55.716 144.488,55.414 L63.846,101.97 C64.353,102.002 64.867,102.006 65.374,101.982 C65.471,101.977 65.568,101.975 65.665,101.975 L65.665,101.975 Z" id="Fill-5" fill="#607D8B"></path>
                    <path d="M2.208,55.134 C1.207,53.307 1.967,50.917 3.906,49.797 L59.917,17.453 C61.856,16.333 64.241,16.907 65.243,18.734 L65.475,19.144 C65.872,19.882 66.368,20.56 66.945,21.165 L67.223,21.435 C70.548,24.649 75.806,25.151 80.111,22.665 L87.43,18.445 C89.37,17.326 91.754,17.899 92.755,19.727 L96.005,25.655 L12.486,73.884 L2.208,55.134 Z" id="Fill-6" fill="#FAFAFA"></path>
                    <path d="M12.486,74.001 C12.476,74.001 12.465,73.999 12.455,73.996 C12.424,73.988 12.399,73.967 12.384,73.94 L2.106,55.19 C1.075,53.31 1.857,50.845 3.848,49.696 L59.858,17.352 C60.525,16.967 61.271,16.764 62.016,16.764 C63.431,16.764 64.666,17.466 65.327,18.646 C65.337,18.654 65.345,18.663 65.351,18.674 L65.578,19.088 C65.584,19.1 65.589,19.112 65.591,19.126 C65.985,19.838 66.469,20.497 67.03,21.085 L67.305,21.351 C69.151,23.137 71.649,24.12 74.336,24.12 C76.313,24.12 78.29,23.582 80.053,22.563 C80.064,22.557 80.076,22.553 80.088,22.55 L87.372,18.344 C88.038,17.959 88.784,17.756 89.529,17.756 C90.956,17.756 92.201,18.472 92.858,19.67 L96.107,25.599 C96.138,25.654 96.118,25.724 96.063,25.756 L12.545,73.985 C12.526,73.996 12.506,74.001 12.486,74.001 L12.486,74.001 Z M62.016,16.997 C61.312,16.997 60.606,17.19 59.975,17.554 L3.965,49.899 C2.083,50.985 1.341,53.308 2.31,55.078 L12.531,73.723 L95.848,25.611 L92.653,19.782 C92.038,18.66 90.87,17.99 89.529,17.99 C88.825,17.99 88.119,18.182 87.489,18.547 L80.172,22.772 C80.161,22.778 80.149,22.782 80.137,22.785 C78.346,23.811 76.341,24.354 74.336,24.354 C71.588,24.354 69.033,23.347 67.142,21.519 L66.864,21.249 C66.277,20.634 65.774,19.947 65.367,19.203 C65.36,19.192 65.356,19.179 65.354,19.166 L65.163,18.819 C65.154,18.811 65.146,18.801 65.14,18.79 C64.525,17.667 63.357,16.997 62.016,16.997 L62.016,16.997 Z" id="Fill-7" fill="#607D8B"></path>
                    <path d="M42.434,48.808 L42.434,48.808 C39.924,48.807 37.737,47.55 36.582,45.443 C34.771,42.139 36.144,37.809 39.641,35.789 L51.932,28.691 C53.103,28.015 54.413,27.658 55.721,27.658 C58.231,27.658 60.418,28.916 61.573,31.023 C63.384,34.327 62.012,38.657 58.514,40.677 L46.223,47.775 C45.053,48.45 43.742,48.808 42.434,48.808 L42.434,48.808 Z M55.721,28.125 C54.495,28.125 53.265,28.461 52.166,29.096 L39.875,36.194 C36.596,38.087 35.302,42.136 36.992,45.218 C38.063,47.173 40.098,48.34 42.434,48.34 C43.661,48.34 44.89,48.005 45.99,47.37 L58.281,40.272 C61.56,38.379 62.853,34.33 61.164,31.248 C60.092,29.293 58.058,28.125 55.721,28.125 L55.721,28.125 Z" id="Fill-8" fill="#607D8B"></path>
                    <path d="M149.588,2.407 C149.588,2.407 155.768,5.975 156.325,6.297 L156.325,7.184 C156.325,7.36 156.338,7.544 156.362,7.733 C156.373,7.814 156.382,7.894 156.39,7.975 C156.53,9.39 157.363,10.973 158.495,11.974 L165.891,18.519 C166.068,18.675 166.249,18.814 166.432,18.934 C168.011,19.974 169.382,19.4 169.494,17.652 C169.543,16.868 169.551,16.057 169.517,15.223 L169.514,15.063 L169.514,13.912 C170.78,14.642 195.501,28.915 195.501,28.915 L195.501,82.915 C195.501,84.005 194.731,84.445 193.781,83.897 L151.308,59.374 C150.358,58.826 149.588,57.497 149.588,56.408 L149.588,22.375" id="Fill-9" fill="#FAFAFA"></path>
                    <path d="M194.553,84.25 C194.296,84.25 194.013,84.165 193.722,83.997 L151.25,59.476 C150.269,58.909 149.471,57.533 149.471,56.408 L149.471,22.375 L149.705,22.375 L149.705,56.408 C149.705,57.459 150.45,58.744 151.366,59.274 L193.839,83.795 C194.263,84.04 194.655,84.083 194.942,83.917 C195.227,83.753 195.384,83.397 195.384,82.915 L195.384,28.982 C194.102,28.242 172.104,15.542 169.631,14.114 L169.634,15.22 C169.668,16.052 169.66,16.874 169.61,17.659 C169.556,18.503 169.214,19.123 168.647,19.405 C168.028,19.714 167.197,19.578 166.367,19.032 C166.181,18.909 165.995,18.766 165.814,18.606 L158.417,12.062 C157.259,11.036 156.418,9.437 156.274,7.986 C156.266,7.907 156.257,7.827 156.247,7.748 C156.221,7.555 156.209,7.365 156.209,7.184 L156.209,6.364 C155.375,5.883 149.529,2.508 149.529,2.508 L149.646,2.306 C149.646,2.306 155.827,5.874 156.384,6.196 L156.442,6.23 L156.442,7.184 C156.442,7.355 156.454,7.535 156.478,7.717 C156.489,7.8 156.499,7.882 156.507,7.963 C156.645,9.358 157.455,10.898 158.572,11.886 L165.969,18.431 C166.142,18.584 166.319,18.72 166.496,18.837 C167.254,19.336 168,19.467 168.543,19.196 C169.033,18.953 169.329,18.401 169.377,17.645 C169.427,16.867 169.434,16.054 169.401,15.228 L169.397,15.065 L169.397,13.71 L169.572,13.81 C170.839,14.541 195.559,28.814 195.559,28.814 L195.618,28.847 L195.618,82.915 C195.618,83.484 195.42,83.911 195.059,84.119 C194.908,84.206 194.737,84.25 194.553,84.25" id="Fill-10" fill="#607D8B"></path>
                    <path d="M145.685,56.161 L169.8,70.083 L143.822,85.081 L142.36,84.774 C135.826,82.604 128.732,81.046 121.341,80.158 C116.976,79.634 112.678,81.254 111.743,83.778 C111.506,84.414 111.503,85.071 111.732,85.706 C113.27,89.973 115.968,94.069 119.727,97.841 L120.259,98.686 C120.26,98.685 94.282,113.683 94.282,113.683 L70.167,99.761 L145.685,56.161" id="Fill-11" fill="#FFFFFF"></path>
                    <path d="M94.282,113.818 L94.223,113.785 L69.933,99.761 L70.108,99.66 L145.685,56.026 L145.743,56.059 L170.033,70.083 L143.842,85.205 L143.797,85.195 C143.772,85.19 142.336,84.888 142.336,84.888 C135.787,82.714 128.723,81.163 121.327,80.274 C120.788,80.209 120.236,80.177 119.689,80.177 C115.931,80.177 112.635,81.708 111.852,83.819 C111.624,84.432 111.621,85.053 111.842,85.667 C113.377,89.925 116.058,93.993 119.81,97.758 L119.826,97.779 L120.352,98.614 C120.354,98.617 120.356,98.62 120.358,98.624 L120.422,98.726 L120.317,98.787 C120.264,98.818 94.599,113.635 94.34,113.785 L94.282,113.818 L94.282,113.818 Z M70.401,99.761 L94.282,113.549 L119.084,99.229 C119.63,98.914 119.93,98.74 120.101,98.654 L119.635,97.914 C115.864,94.127 113.168,90.033 111.622,85.746 C111.382,85.079 111.386,84.404 111.633,83.738 C112.448,81.539 115.836,79.943 119.689,79.943 C120.246,79.943 120.806,79.976 121.355,80.042 C128.767,80.933 135.846,82.487 142.396,84.663 C143.232,84.838 143.611,84.917 143.786,84.967 L169.566,70.083 L145.685,56.295 L70.401,99.761 L70.401,99.761 Z" id="Fill-12" fill="#607D8B"></path>
                    <path d="M167.23,18.979 L167.23,69.85 L139.909,85.623 L133.448,71.456 C132.538,69.46 130.02,69.718 127.824,72.03 C126.769,73.14 125.931,74.585 125.494,76.048 L119.034,97.676 L91.712,113.45 L91.712,62.579 L167.23,18.979" id="Fill-13" fill="#FFFFFF"></path>
                    <path d="M91.712,113.567 C91.692,113.567 91.672,113.561 91.653,113.551 C91.618,113.53 91.595,113.492 91.595,113.45 L91.595,62.579 C91.595,62.537 91.618,62.499 91.653,62.478 L167.172,18.878 C167.208,18.857 167.252,18.857 167.288,18.878 C167.324,18.899 167.347,18.937 167.347,18.979 L167.347,69.85 C167.347,69.891 167.324,69.93 167.288,69.95 L139.967,85.725 C139.939,85.741 139.905,85.745 139.873,85.735 C139.842,85.725 139.816,85.702 139.802,85.672 L133.342,71.504 C132.967,70.682 132.28,70.229 131.408,70.229 C130.319,70.229 129.044,70.915 127.908,72.11 C126.874,73.2 126.034,74.647 125.606,76.082 L119.146,97.709 C119.137,97.738 119.118,97.762 119.092,97.777 L91.77,113.551 C91.752,113.561 91.732,113.567 91.712,113.567 L91.712,113.567 Z M91.829,62.647 L91.829,113.248 L118.935,97.598 L125.382,76.015 C125.827,74.525 126.664,73.081 127.739,71.95 C128.919,70.708 130.256,69.996 131.408,69.996 C132.377,69.996 133.139,70.497 133.554,71.407 L139.961,85.458 L167.113,69.782 L167.113,19.181 L91.829,62.647 L91.829,62.647 Z" id="Fill-14" fill="#607D8B"></path>
                    <path d="M168.543,19.213 L168.543,70.083 L141.221,85.857 L134.761,71.689 C133.851,69.694 131.333,69.951 129.137,72.263 C128.082,73.374 127.244,74.819 126.807,76.282 L120.346,97.909 L93.025,113.683 L93.025,62.813 L168.543,19.213" id="Fill-15" fill="#FFFFFF"></path>
                    <path d="M93.025,113.8 C93.005,113.8 92.984,113.795 92.966,113.785 C92.931,113.764 92.908,113.725 92.908,113.684 L92.908,62.813 C92.908,62.771 92.931,62.733 92.966,62.712 L168.484,19.112 C168.52,19.09 168.565,19.09 168.601,19.112 C168.637,19.132 168.66,19.171 168.66,19.212 L168.66,70.083 C168.66,70.125 168.637,70.164 168.601,70.184 L141.28,85.958 C141.251,85.975 141.217,85.979 141.186,85.968 C141.154,85.958 141.129,85.936 141.115,85.906 L134.655,71.738 C134.28,70.915 133.593,70.463 132.72,70.463 C131.632,70.463 130.357,71.148 129.221,72.344 C128.186,73.433 127.347,74.881 126.919,76.315 L120.458,97.943 C120.45,97.972 120.431,97.996 120.405,98.01 L93.083,113.785 C93.065,113.795 93.045,113.8 93.025,113.8 L93.025,113.8 Z M93.142,62.881 L93.142,113.481 L120.248,97.832 L126.695,76.248 C127.14,74.758 127.977,73.315 129.052,72.183 C130.231,70.942 131.568,70.229 132.72,70.229 C133.689,70.229 134.452,70.731 134.867,71.641 L141.274,85.692 L168.426,70.016 L168.426,19.415 L93.142,62.881 L93.142,62.881 Z" id="Fill-16" fill="#607D8B"></path>
                    <path d="M169.8,70.083 L142.478,85.857 L136.018,71.689 C135.108,69.694 132.59,69.951 130.393,72.263 C129.339,73.374 128.5,74.819 128.064,76.282 L121.603,97.909 L94.282,113.683 L94.282,62.813 L169.8,19.213 L169.8,70.083 Z" id="Fill-17" fill="#FAFAFA"></path>
                    <path d="M94.282,113.917 C94.241,113.917 94.201,113.907 94.165,113.886 C94.093,113.845 94.048,113.767 94.048,113.684 L94.048,62.813 C94.048,62.73 94.093,62.652 94.165,62.611 L169.683,19.01 C169.755,18.969 169.844,18.969 169.917,19.01 C169.989,19.052 170.033,19.129 170.033,19.212 L170.033,70.083 C170.033,70.166 169.989,70.244 169.917,70.285 L142.595,86.06 C142.538,86.092 142.469,86.1 142.407,86.08 C142.344,86.06 142.293,86.014 142.266,85.954 L135.805,71.786 C135.445,70.997 134.813,70.58 133.977,70.58 C132.921,70.58 131.676,71.252 130.562,72.424 C129.54,73.501 128.711,74.931 128.287,76.348 L121.827,97.976 C121.81,98.034 121.771,98.082 121.72,98.112 L94.398,113.886 C94.362,113.907 94.322,113.917 94.282,113.917 L94.282,113.917 Z M94.515,62.948 L94.515,113.279 L121.406,97.754 L127.84,76.215 C128.29,74.708 129.137,73.247 130.224,72.103 C131.425,70.838 132.793,70.112 133.977,70.112 C134.995,70.112 135.795,70.638 136.23,71.592 L142.584,85.526 L169.566,69.948 L169.566,19.617 L94.515,62.948 L94.515,62.948 Z" id="Fill-18" fill="#607D8B"></path>
                    <path d="M109.894,92.943 L109.894,92.943 C108.12,92.943 106.653,92.218 105.65,90.823 C105.583,90.731 105.593,90.61 105.673,90.529 C105.753,90.448 105.88,90.44 105.974,90.506 C106.754,91.053 107.679,91.333 108.724,91.333 C110.047,91.333 111.478,90.894 112.98,90.027 C118.291,86.96 122.611,79.509 122.611,73.416 C122.611,71.489 122.169,69.856 121.333,68.692 C121.266,68.6 121.276,68.473 121.356,68.392 C121.436,68.311 121.563,68.299 121.656,68.365 C123.327,69.537 124.247,71.746 124.247,74.584 C124.247,80.826 119.821,88.447 114.382,91.587 C112.808,92.495 111.298,92.943 109.894,92.943 L109.894,92.943 Z M106.925,91.401 C107.738,92.052 108.745,92.278 109.893,92.278 L109.894,92.278 C111.215,92.278 112.647,91.951 114.148,91.084 C119.459,88.017 123.78,80.621 123.78,74.528 C123.78,72.549 123.317,70.929 122.454,69.767 C122.865,70.802 123.079,72.042 123.079,73.402 C123.079,79.645 118.653,87.285 113.214,90.425 C111.64,91.334 110.13,91.742 108.724,91.742 C108.083,91.742 107.481,91.593 106.925,91.401 L106.925,91.401 Z" id="Fill-19" fill="#607D8B"></path>
                    <path d="M113.097,90.23 C118.481,87.122 122.845,79.594 122.845,73.416 C122.845,71.365 122.362,69.724 121.522,68.556 C119.738,67.304 117.148,67.362 114.265,69.026 C108.881,72.134 104.517,79.662 104.517,85.84 C104.517,87.891 105,89.532 105.84,90.7 C107.624,91.952 110.214,91.894 113.097,90.23" id="Fill-20" fill="#FAFAFA"></path>
                    <path d="M108.724,91.614 L108.724,91.614 C107.582,91.614 106.566,91.401 105.705,90.797 C105.684,90.783 105.665,90.811 105.65,90.79 C104.756,89.546 104.283,87.842 104.283,85.817 C104.283,79.575 108.709,71.953 114.148,68.812 C115.722,67.904 117.232,67.449 118.638,67.449 C119.78,67.449 120.796,67.758 121.656,68.362 C121.678,68.377 121.697,68.397 121.712,68.418 C122.606,69.662 123.079,71.39 123.079,73.415 C123.079,79.658 118.653,87.198 113.214,90.338 C111.64,91.247 110.13,91.614 108.724,91.614 L108.724,91.614 Z M106.006,90.505 C106.78,91.037 107.694,91.281 108.724,91.281 C110.047,91.281 111.478,90.868 112.98,90.001 C118.291,86.935 122.611,79.496 122.611,73.403 C122.611,71.494 122.177,69.88 121.356,68.718 C120.582,68.185 119.668,67.919 118.638,67.919 C117.315,67.919 115.883,68.36 114.382,69.227 C109.071,72.293 104.751,79.733 104.751,85.826 C104.751,87.735 105.185,89.343 106.006,90.505 L106.006,90.505 Z" id="Fill-21" fill="#607D8B"></path>
                    <path d="M149.318,7.262 L139.334,16.14 L155.227,27.171 L160.816,21.059 L149.318,7.262" id="Fill-22" fill="#FAFAFA"></path>
                    <path d="M169.676,13.84 L159.928,19.467 C156.286,21.57 150.4,21.58 146.781,19.491 C143.161,17.402 143.18,14.003 146.822,11.9 L156.317,6.292 L149.588,2.407 L67.752,49.478 L113.675,75.992 L116.756,74.213 C117.387,73.848 117.625,73.315 117.374,72.823 C115.017,68.191 114.781,63.277 116.691,58.561 C122.329,44.641 141.2,33.746 165.309,30.491 C173.478,29.388 181.989,29.524 190.013,30.885 C190.865,31.03 191.789,30.893 192.42,30.528 L195.501,28.75 L169.676,13.84" id="Fill-23" fill="#FAFAFA"></path>
                    <path d="M113.675,76.459 C113.594,76.459 113.514,76.438 113.442,76.397 L67.518,49.882 C67.374,49.799 67.284,49.645 67.285,49.478 C67.285,49.311 67.374,49.157 67.519,49.073 L149.355,2.002 C149.499,1.919 149.677,1.919 149.821,2.002 L156.55,5.887 C156.774,6.017 156.85,6.302 156.722,6.526 C156.592,6.749 156.307,6.826 156.083,6.696 L149.587,2.946 L68.687,49.479 L113.675,75.452 L116.523,73.808 C116.715,73.697 117.143,73.399 116.958,73.035 C114.542,68.287 114.3,63.221 116.258,58.385 C119.064,51.458 125.143,45.143 133.84,40.122 C142.497,35.124 153.358,31.633 165.247,30.028 C173.445,28.921 182.037,29.058 190.091,30.425 C190.83,30.55 191.652,30.432 192.186,30.124 L194.567,28.75 L169.442,14.244 C169.219,14.115 169.142,13.829 169.271,13.606 C169.4,13.382 169.685,13.306 169.909,13.435 L195.734,28.345 C195.879,28.428 195.968,28.583 195.968,28.75 C195.968,28.916 195.879,29.071 195.734,29.154 L192.653,30.933 C191.932,31.35 190.89,31.508 189.935,31.346 C181.972,29.995 173.478,29.86 165.372,30.954 C153.602,32.543 142.86,35.993 134.307,40.931 C125.793,45.847 119.851,52.004 117.124,58.736 C115.27,63.314 115.501,68.112 117.79,72.611 C118.16,73.336 117.845,74.124 116.99,74.617 L113.909,76.397 C113.836,76.438 113.756,76.459 113.675,76.459" id="Fill-24" fill="#455A64"></path>
                    <path d="M153.316,21.279 C150.903,21.279 148.495,20.751 146.664,19.693 C144.846,18.644 143.844,17.232 143.844,15.718 C143.844,14.191 144.86,12.763 146.705,11.698 L156.198,6.091 C156.309,6.025 156.452,6.062 156.518,6.173 C156.583,6.284 156.547,6.427 156.436,6.493 L146.94,12.102 C145.244,13.081 144.312,14.365 144.312,15.718 C144.312,17.058 145.23,18.326 146.897,19.289 C150.446,21.338 156.24,21.327 159.811,19.265 L169.559,13.637 C169.67,13.573 169.813,13.611 169.878,13.723 C169.943,13.834 169.904,13.977 169.793,14.042 L160.045,19.67 C158.187,20.742 155.749,21.279 153.316,21.279" id="Fill-25" fill="#607D8B"></path>
                    <path d="M113.675,75.992 L67.762,49.484" id="Fill-26" fill="#455A64"></path>
                    <path d="M113.675,76.342 C113.615,76.342 113.555,76.327 113.5,76.295 L67.587,49.787 C67.419,49.69 67.362,49.476 67.459,49.309 C67.556,49.141 67.77,49.083 67.937,49.18 L113.85,75.688 C114.018,75.785 114.075,76 113.978,76.167 C113.914,76.279 113.796,76.342 113.675,76.342" id="Fill-27" fill="#455A64"></path>
                    <path d="M67.762,49.484 L67.762,103.485 C67.762,104.575 68.532,105.903 69.482,106.452 L111.955,130.973 C112.905,131.522 113.675,131.083 113.675,129.993 L113.675,75.992" id="Fill-28" fill="#FAFAFA"></path>
                    <path d="M112.727,131.561 C112.43,131.561 112.107,131.466 111.78,131.276 L69.307,106.755 C68.244,106.142 67.412,104.705 67.412,103.485 L67.412,49.484 C67.412,49.29 67.569,49.134 67.762,49.134 C67.956,49.134 68.113,49.29 68.113,49.484 L68.113,103.485 C68.113,104.445 68.82,105.665 69.657,106.148 L112.13,130.67 C112.474,130.868 112.791,130.913 113,130.792 C113.206,130.673 113.325,130.381 113.325,129.993 L113.325,75.992 C113.325,75.798 113.482,75.641 113.675,75.641 C113.869,75.641 114.025,75.798 114.025,75.992 L114.025,129.993 C114.025,130.648 113.786,131.147 113.35,131.399 C113.162,131.507 112.952,131.561 112.727,131.561" id="Fill-29" fill="#455A64"></path>
                    <path d="M112.86,40.512 C112.86,40.512 112.86,40.512 112.859,40.512 C110.541,40.512 108.36,39.99 106.717,39.041 C105.012,38.057 104.074,36.726 104.074,35.292 C104.074,33.847 105.026,32.501 106.754,31.504 L118.795,24.551 C120.463,23.589 122.669,23.058 125.007,23.058 C127.325,23.058 129.506,23.581 131.15,24.53 C132.854,25.514 133.793,26.845 133.793,28.278 C133.793,29.724 132.841,31.069 131.113,32.067 L119.071,39.019 C117.403,39.982 115.197,40.512 112.86,40.512 L112.86,40.512 Z M125.007,23.759 C122.79,23.759 120.709,24.256 119.146,25.158 L107.104,32.11 C105.602,32.978 104.774,34.108 104.774,35.292 C104.774,36.465 105.589,37.581 107.067,38.434 C108.605,39.323 110.663,39.812 112.859,39.812 L112.86,39.812 C115.076,39.812 117.158,39.315 118.721,38.413 L130.762,31.46 C132.264,30.593 133.092,29.463 133.092,28.278 C133.092,27.106 132.278,25.99 130.8,25.136 C129.261,24.248 127.204,23.759 125.007,23.759 L125.007,23.759 Z" id="Fill-30" fill="#607D8B"></path>
                    <path d="M165.63,16.219 L159.896,19.53 C156.729,21.358 151.61,21.367 148.463,19.55 C145.316,17.733 145.332,14.778 148.499,12.949 L154.233,9.639 L165.63,16.219" id="Fill-31" fill="#FAFAFA"></path>
                    <path d="M154.233,10.448 L164.228,16.219 L159.546,18.923 C158.112,19.75 156.194,20.206 154.147,20.206 C152.118,20.206 150.224,19.757 148.814,18.943 C147.524,18.199 146.814,17.249 146.814,16.269 C146.814,15.278 147.537,14.314 148.85,13.556 L154.233,10.448 M154.233,9.639 L148.499,12.949 C145.332,14.778 145.316,17.733 148.463,19.55 C150.031,20.455 152.086,20.907 154.147,20.907 C156.224,20.907 158.306,20.447 159.896,19.53 L165.63,16.219 L154.233,9.639" id="Fill-32" fill="#607D8B"></path>
                    <path d="M145.445,72.667 L145.445,72.667 C143.672,72.667 142.204,71.817 141.202,70.422 C141.135,70.33 141.145,70.147 141.225,70.066 C141.305,69.985 141.432,69.946 141.525,70.011 C142.306,70.559 143.231,70.823 144.276,70.822 C145.598,70.822 147.03,70.376 148.532,69.509 C153.842,66.443 158.163,58.987 158.163,52.894 C158.163,50.967 157.721,49.332 156.884,48.168 C156.818,48.076 156.828,47.948 156.908,47.867 C156.988,47.786 157.114,47.774 157.208,47.84 C158.878,49.012 159.798,51.22 159.798,54.059 C159.798,60.301 155.373,68.046 149.933,71.186 C148.36,72.094 146.85,72.667 145.445,72.667 L145.445,72.667 Z M142.476,71 C143.29,71.651 144.296,72.002 145.445,72.002 C146.767,72.002 148.198,71.55 149.7,70.682 C155.01,67.617 159.331,60.159 159.331,54.065 C159.331,52.085 158.868,50.435 158.006,49.272 C158.417,50.307 158.63,51.532 158.63,52.892 C158.63,59.134 154.205,66.767 148.765,69.907 C147.192,70.816 145.681,71.283 144.276,71.283 C143.634,71.283 143.033,71.192 142.476,71 L142.476,71 Z" id="Fill-33" fill="#607D8B"></path>
                    <path d="M148.648,69.704 C154.032,66.596 158.396,59.068 158.396,52.891 C158.396,50.839 157.913,49.198 157.074,48.03 C155.289,46.778 152.699,46.836 149.816,48.501 C144.433,51.609 140.068,59.137 140.068,65.314 C140.068,67.365 140.552,69.006 141.391,70.174 C143.176,71.427 145.765,71.369 148.648,69.704" id="Fill-34" fill="#FAFAFA"></path>
                    <path d="M144.276,71.276 L144.276,71.276 C143.133,71.276 142.118,70.969 141.257,70.365 C141.236,70.351 141.217,70.332 141.202,70.311 C140.307,69.067 139.835,67.339 139.835,65.314 C139.835,59.073 144.26,51.439 149.7,48.298 C151.273,47.39 152.784,46.929 154.189,46.929 C155.332,46.929 156.347,47.236 157.208,47.839 C157.229,47.854 157.248,47.873 157.263,47.894 C158.157,49.138 158.63,50.865 158.63,52.891 C158.63,59.132 154.205,66.766 148.765,69.907 C147.192,70.815 145.681,71.276 144.276,71.276 L144.276,71.276 Z M141.558,70.104 C142.331,70.637 143.245,71.005 144.276,71.005 C145.598,71.005 147.03,70.467 148.532,69.6 C153.842,66.534 158.163,59.033 158.163,52.939 C158.163,51.031 157.729,49.385 156.907,48.223 C156.133,47.691 155.219,47.409 154.189,47.409 C152.867,47.409 151.435,47.842 149.933,48.709 C144.623,51.775 140.302,59.273 140.302,65.366 C140.302,67.276 140.736,68.942 141.558,70.104 L141.558,70.104 Z" id="Fill-35" fill="#607D8B"></path>
                    <path d="M150.72,65.361 L150.357,65.066 C151.147,64.092 151.869,63.04 152.505,61.938 C153.313,60.539 153.978,59.067 154.482,57.563 L154.925,57.712 C154.412,59.245 153.733,60.745 152.91,62.172 C152.262,63.295 151.525,64.368 150.72,65.361" id="Fill-36" fill="#607D8B"></path>
                    <path d="M115.917,84.514 L115.554,84.22 C116.344,83.245 117.066,82.194 117.702,81.092 C118.51,79.692 119.175,78.22 119.678,76.717 L120.121,76.865 C119.608,78.398 118.93,79.899 118.106,81.326 C117.458,82.448 116.722,83.521 115.917,84.514" id="Fill-37" fill="#607D8B"></path>
                    <path d="M114,130.476 L114,130.008 L114,76.052 L114,75.584 L114,76.052 L114,130.008 L114,130.476" id="Fill-38" fill="#607D8B"></path>
                </g>
                <g id="Imported-Layers-Copy" transform="translate(62.000000, 0.000000)" sketch:type="MSShapeGroup">
                    <path d="M19.822,37.474 C19.839,37.339 19.747,37.194 19.555,37.082 C19.228,36.894 18.729,36.872 18.446,37.037 L12.434,40.508 C12.303,40.584 12.24,40.686 12.243,40.793 C12.245,40.925 12.245,41.254 12.245,41.371 L12.245,41.414 L12.238,41.542 C8.148,43.887 5.647,45.321 5.647,45.321 C5.646,45.321 3.57,46.367 2.86,50.513 C2.86,50.513 1.948,57.474 1.962,70.258 C1.977,82.828 2.568,87.328 3.129,91.609 C3.349,93.293 6.13,93.734 6.13,93.734 C6.461,93.774 6.828,93.707 7.21,93.486 L82.483,49.935 C84.291,48.866 85.15,46.216 85.539,43.651 C86.752,35.661 87.214,10.673 85.264,3.773 C85.068,3.08 84.754,2.69 84.396,2.491 L82.31,1.701 C81.583,1.729 80.894,2.168 80.776,2.236 C80.636,2.317 41.807,24.585 20.032,37.072 L19.822,37.474" id="Fill-1" fill="#FFFFFF"></path>
                    <path d="M82.311,1.701 L84.396,2.491 C84.754,2.69 85.068,3.08 85.264,3.773 C87.213,10.673 86.751,35.66 85.539,43.651 C85.149,46.216 84.29,48.866 82.483,49.935 L7.21,93.486 C6.897,93.667 6.595,93.744 6.314,93.744 L6.131,93.733 C6.131,93.734 3.349,93.293 3.128,91.609 C2.568,87.327 1.977,82.828 1.963,70.258 C1.948,57.474 2.86,50.513 2.86,50.513 C3.57,46.367 5.647,45.321 5.647,45.321 C5.647,45.321 8.148,43.887 12.238,41.542 L12.245,41.414 L12.245,41.371 C12.245,41.254 12.245,40.925 12.243,40.793 C12.24,40.686 12.302,40.583 12.434,40.508 L18.446,37.036 C18.574,36.962 18.746,36.926 18.927,36.926 C19.145,36.926 19.376,36.979 19.554,37.082 C19.747,37.194 19.839,37.34 19.822,37.474 L20.033,37.072 C41.806,24.585 80.636,2.318 80.777,2.236 C80.894,2.168 81.583,1.729 82.311,1.701 M82.311,0.704 L82.272,0.705 C81.654,0.728 80.989,0.949 80.298,1.361 L80.277,1.373 C80.129,1.458 59.768,13.135 19.758,36.079 C19.5,35.981 19.214,35.929 18.927,35.929 C18.562,35.929 18.223,36.013 17.947,36.173 L11.935,39.644 C11.493,39.899 11.236,40.334 11.246,40.81 L11.247,40.96 L5.167,44.447 C4.794,44.646 2.625,45.978 1.877,50.345 L1.871,50.384 C1.862,50.454 0.951,57.557 0.965,70.259 C0.979,82.879 1.568,87.375 2.137,91.724 L2.139,91.739 C2.447,94.094 5.614,94.662 5.975,94.719 L6.009,94.723 C6.11,94.736 6.213,94.742 6.314,94.742 C6.79,94.742 7.26,94.61 7.71,94.35 L82.983,50.798 C84.794,49.727 85.982,47.375 86.525,43.801 C87.711,35.987 88.259,10.705 86.224,3.502 C85.971,2.609 85.52,1.975 84.881,1.62 L84.749,1.558 L82.664,0.769 C82.551,0.725 82.431,0.704 82.311,0.704" id="Fill-2" fill="#455A64"></path>
                    <path d="M66.267,11.565 L67.762,11.999 L11.423,44.325" id="Fill-3" fill="#FFFFFF"></path>
                    <path d="M12.202,90.545 C12.029,90.545 11.862,90.455 11.769,90.295 C11.632,90.057 11.713,89.752 11.952,89.614 L30.389,78.969 C30.628,78.831 30.933,78.913 31.071,79.152 C31.208,79.39 31.127,79.696 30.888,79.833 L12.451,90.478 L12.202,90.545" id="Fill-4" fill="#607D8B"></path>
                    <path d="M13.764,42.654 L13.656,42.592 L13.702,42.421 L18.837,39.457 L19.007,39.502 L18.962,39.673 L13.827,42.637 L13.764,42.654" id="Fill-5" fill="#607D8B"></path>
                    <path d="M8.52,90.375 L8.52,46.421 L8.583,46.385 L75.84,7.554 L75.84,51.508 L75.778,51.544 L8.52,90.375 L8.52,90.375 Z M8.77,46.564 L8.77,89.944 L75.591,51.365 L75.591,7.985 L8.77,46.564 L8.77,46.564 Z" id="Fill-6" fill="#607D8B"></path>
                    <path d="M24.986,83.182 C24.756,83.331 24.374,83.566 24.137,83.705 L12.632,90.406 C12.395,90.545 12.426,90.658 12.7,90.658 L13.265,90.658 C13.54,90.658 13.958,90.545 14.195,90.406 L25.7,83.705 C25.937,83.566 26.128,83.452 26.125,83.449 C26.122,83.447 26.119,83.22 26.119,82.946 C26.119,82.672 25.931,82.569 25.701,82.719 L24.986,83.182" id="Fill-7" fill="#607D8B"></path>
                    <path d="M13.266,90.782 L12.7,90.782 C12.5,90.782 12.384,90.726 12.354,90.616 C12.324,90.506 12.397,90.399 12.569,90.299 L24.074,83.597 C24.31,83.459 24.689,83.226 24.918,83.078 L25.633,82.614 C25.723,82.555 25.813,82.525 25.899,82.525 C26.071,82.525 26.244,82.655 26.244,82.946 C26.244,83.16 26.245,83.309 26.247,83.383 L26.253,83.387 L26.249,83.456 C26.246,83.531 26.246,83.531 25.763,83.812 L14.258,90.514 C14,90.665 13.564,90.782 13.266,90.782 L13.266,90.782 Z M12.666,90.532 L12.7,90.533 L13.266,90.533 C13.518,90.533 13.915,90.425 14.132,90.299 L25.637,83.597 C25.805,83.499 25.931,83.424 25.998,83.383 C25.994,83.299 25.994,83.165 25.994,82.946 L25.899,82.775 L25.768,82.824 L25.054,83.287 C24.822,83.437 24.438,83.673 24.2,83.812 L12.695,90.514 L12.666,90.532 L12.666,90.532 Z" id="Fill-8" fill="#607D8B"></path>
                    <path d="M13.266,89.871 L12.7,89.871 C12.5,89.871 12.384,89.815 12.354,89.705 C12.324,89.595 12.397,89.488 12.569,89.388 L24.074,82.686 C24.332,82.535 24.768,82.418 25.067,82.418 L25.632,82.418 C25.832,82.418 25.948,82.474 25.978,82.584 C26.008,82.694 25.935,82.801 25.763,82.901 L14.258,89.603 C14,89.754 13.564,89.871 13.266,89.871 L13.266,89.871 Z M12.666,89.621 L12.7,89.622 L13.266,89.622 C13.518,89.622 13.915,89.515 14.132,89.388 L25.637,82.686 L25.667,82.668 L25.632,82.667 L25.067,82.667 C24.815,82.667 24.418,82.775 24.2,82.901 L12.695,89.603 L12.666,89.621 L12.666,89.621 Z" id="Fill-9" fill="#607D8B"></path>
                    <path d="M12.37,90.801 L12.37,89.554 L12.37,90.801" id="Fill-10" fill="#607D8B"></path>
                    <path d="M6.13,93.901 C5.379,93.808 4.816,93.164 4.691,92.525 C3.86,88.287 3.54,83.743 3.526,71.173 C3.511,58.389 4.423,51.428 4.423,51.428 C5.134,47.282 7.21,46.236 7.21,46.236 C7.21,46.236 81.667,3.25 82.069,3.017 C82.292,2.888 84.556,1.433 85.264,3.94 C87.214,10.84 86.752,35.827 85.539,43.818 C85.15,46.383 84.291,49.033 82.483,50.101 L7.21,93.653 C6.828,93.874 6.461,93.941 6.13,93.901 C6.13,93.901 3.349,93.46 3.129,91.776 C2.568,87.495 1.977,82.995 1.962,70.425 C1.948,57.641 2.86,50.68 2.86,50.68 C3.57,46.534 5.647,45.489 5.647,45.489 C5.646,45.489 8.065,44.092 12.245,41.679 L13.116,41.56 L19.715,37.73 L19.761,37.269 L6.13,93.901" id="Fill-11" fill="#FAFAFA"></path>
                    <path d="M6.317,94.161 L6.102,94.148 L6.101,94.148 L5.857,94.101 C5.138,93.945 3.085,93.365 2.881,91.809 C2.313,87.469 1.727,82.996 1.713,70.425 C1.699,57.771 2.604,50.718 2.613,50.648 C3.338,46.417 5.445,45.31 5.535,45.266 L12.163,41.439 L13.033,41.32 L19.479,37.578 L19.513,37.244 C19.526,37.107 19.647,37.008 19.786,37.021 C19.922,37.034 20.023,37.156 20.009,37.293 L19.95,37.882 L13.198,41.801 L12.328,41.919 L5.772,45.704 C5.741,45.72 3.782,46.772 3.106,50.722 C3.099,50.782 2.198,57.808 2.212,70.424 C2.226,82.963 2.809,87.42 3.373,91.729 C3.464,92.42 4.062,92.883 4.682,93.181 C4.566,92.984 4.486,92.776 4.446,92.572 C3.665,88.588 3.291,84.37 3.276,71.173 C3.262,58.52 4.167,51.466 4.176,51.396 C4.901,47.165 7.008,46.059 7.098,46.014 C7.094,46.015 81.542,3.034 81.944,2.802 L81.972,2.785 C82.876,2.247 83.692,2.097 84.332,2.352 C84.887,2.573 85.281,3.085 85.504,3.872 C87.518,11 86.964,36.091 85.785,43.855 C85.278,47.196 84.21,49.37 82.61,50.317 L7.335,93.869 C6.999,94.063 6.658,94.161 6.317,94.161 L6.317,94.161 Z M6.17,93.654 C6.463,93.69 6.774,93.617 7.085,93.437 L82.358,49.886 C84.181,48.808 84.96,45.971 85.292,43.78 C86.466,36.049 87.023,11.085 85.024,4.008 C84.846,3.377 84.551,2.976 84.148,2.816 C83.664,2.623 82.982,2.764 82.227,3.213 L82.193,3.234 C81.791,3.466 7.335,46.452 7.335,46.452 C7.304,46.469 5.346,47.521 4.669,51.471 C4.662,51.53 3.761,58.556 3.775,71.173 C3.79,84.328 4.161,88.524 4.936,92.476 C5.026,92.937 5.412,93.459 5.973,93.615 C6.087,93.64 6.158,93.652 6.169,93.654 L6.17,93.654 L6.17,93.654 Z" id="Fill-12" fill="#455A64"></path>
                    <path d="M7.317,68.982 C7.806,68.701 8.202,68.926 8.202,69.487 C8.202,70.047 7.806,70.73 7.317,71.012 C6.829,71.294 6.433,71.069 6.433,70.508 C6.433,69.948 6.829,69.265 7.317,68.982" id="Fill-13" fill="#FFFFFF"></path>
                    <path d="M6.92,71.133 C6.631,71.133 6.433,70.905 6.433,70.508 C6.433,69.948 6.829,69.265 7.317,68.982 C7.46,68.9 7.595,68.861 7.714,68.861 C8.003,68.861 8.202,69.09 8.202,69.487 C8.202,70.047 7.806,70.73 7.317,71.012 C7.174,71.094 7.039,71.133 6.92,71.133 M7.714,68.674 C7.557,68.674 7.392,68.723 7.224,68.821 C6.676,69.138 6.246,69.879 6.246,70.508 C6.246,70.994 6.517,71.32 6.92,71.32 C7.078,71.32 7.243,71.271 7.411,71.174 C7.959,70.857 8.389,70.117 8.389,69.487 C8.389,69.001 8.117,68.674 7.714,68.674" id="Fill-14" fill="#8097A2"></path>
                    <path d="M6.92,70.947 C6.649,70.947 6.621,70.64 6.621,70.508 C6.621,70.017 6.982,69.392 7.411,69.145 C7.521,69.082 7.625,69.049 7.714,69.049 C7.986,69.049 8.015,69.355 8.015,69.487 C8.015,69.978 7.652,70.603 7.224,70.851 C7.115,70.914 7.01,70.947 6.92,70.947 M7.714,68.861 C7.595,68.861 7.46,68.9 7.317,68.982 C6.829,69.265 6.433,69.948 6.433,70.508 C6.433,70.905 6.631,71.133 6.92,71.133 C7.039,71.133 7.174,71.094 7.317,71.012 C7.806,70.73 8.202,70.047 8.202,69.487 C8.202,69.09 8.003,68.861 7.714,68.861" id="Fill-15" fill="#8097A2"></path>
                    <path d="M7.444,85.35 C7.708,85.198 7.921,85.319 7.921,85.622 C7.921,85.925 7.708,86.292 7.444,86.444 C7.181,86.597 6.967,86.475 6.967,86.173 C6.967,85.871 7.181,85.502 7.444,85.35" id="Fill-16" fill="#FFFFFF"></path>
                    <path d="M7.23,86.51 C7.074,86.51 6.967,86.387 6.967,86.173 C6.967,85.871 7.181,85.502 7.444,85.35 C7.521,85.305 7.594,85.284 7.658,85.284 C7.814,85.284 7.921,85.408 7.921,85.622 C7.921,85.925 7.708,86.292 7.444,86.444 C7.367,86.489 7.294,86.51 7.23,86.51 M7.658,85.098 C7.558,85.098 7.455,85.127 7.351,85.188 C7.031,85.373 6.781,85.806 6.781,86.173 C6.781,86.482 6.966,86.697 7.23,86.697 C7.33,86.697 7.433,86.666 7.538,86.607 C7.858,86.422 8.108,85.989 8.108,85.622 C8.108,85.313 7.923,85.098 7.658,85.098" id="Fill-17" fill="#8097A2"></path>
                    <path d="M7.23,86.322 L7.154,86.173 C7.154,85.938 7.333,85.629 7.538,85.512 L7.658,85.471 L7.734,85.622 C7.734,85.856 7.555,86.164 7.351,86.282 L7.23,86.322 M7.658,85.284 C7.594,85.284 7.521,85.305 7.444,85.35 C7.181,85.502 6.967,85.871 6.967,86.173 C6.967,86.387 7.074,86.51 7.23,86.51 C7.294,86.51 7.367,86.489 7.444,86.444 C7.708,86.292 7.921,85.925 7.921,85.622 C7.921,85.408 7.814,85.284 7.658,85.284" id="Fill-18" fill="#8097A2"></path>
                    <path d="M77.278,7.769 L77.278,51.436 L10.208,90.16 L10.208,46.493 L77.278,7.769" id="Fill-19" fill="#455A64"></path>
                    <path d="M10.083,90.375 L10.083,46.421 L10.146,46.385 L77.403,7.554 L77.403,51.508 L77.341,51.544 L10.083,90.375 L10.083,90.375 Z M10.333,46.564 L10.333,89.944 L77.154,51.365 L77.154,7.985 L10.333,46.564 L10.333,46.564 Z" id="Fill-20" fill="#607D8B"></path>
                </g>
                <path d="M125.737,88.647 L118.098,91.981 L118.098,84 L106.639,88.713 L106.639,96.982 L99,100.315 L112.369,103.961 L125.737,88.647" id="Imported-Layers-Copy-2" fill="#455A64" sketch:type="MSShapeGroup"></path>
            </g>
        </g>
    </g>
</svg>');
+};
+
+module.exports = RotateInstructions;
+
+},{"./util.js":22}],17:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * TODO: Fix up all "new THREE" instantiations to improve performance.
+ */
+var SensorSample = _dereq_('./sensor-sample.js');
+var MathUtil = _dereq_('../math-util.js');
+var Util = _dereq_('../util.js');
+
+var DEBUG = false;
+
+/**
+ * An implementation of a simple complementary filter, which fuses gyroscope and
+ * accelerometer data from the 'devicemotion' event.
+ *
+ * Accelerometer data is very noisy, but stable over the long term.
+ * Gyroscope data is smooth, but tends to drift over the long term.
+ *
+ * This fusion is relatively simple:
+ * 1. Get orientation estimates from the accelerometer by applying a low-pass
+ * filter to that data.
+ * 2. Get orientation estimates from the gyroscope by integrating over time.
+ * 3. Combine the two estimates, weighting (1) in the long term, but (2) in the
+ * short term.
+ */
+function ComplementaryFilter(kFilter) {
+ this.kFilter = kFilter;
+
+ // Raw sensor measurements.
+ this.currentAccelMeasurement = new SensorSample();
+ this.currentGyroMeasurement = new SensorSample();
+ this.previousGyroMeasurement = new SensorSample();
+
+ // Set the quaternion to be looking in the -z direction by default.
+ this.filterQ = new MathUtil.Quaternion(1, 0, 0, 1);
+ this.previousFilterQ = new MathUtil.Quaternion();
+
+ // Orientation based on the accelerometer.
+ this.accelQ = new MathUtil.Quaternion();
+ // Whether or not the orientation has been initialized.
+ this.isOrientationInitialized = false;
+ // Running estimate of gravity based on the current orientation.
+ this.estimatedGravity = new MathUtil.Vector3();
+ // Measured gravity based on accelerometer.
+ this.measuredGravity = new MathUtil.Vector3();
+
+ // Debug-only quaternion of gyro-based orientation.
+ this.gyroIntegralQ = new MathUtil.Quaternion();
+}
+
+ComplementaryFilter.prototype.addAccelMeasurement = function(vector, timestampS) {
+ this.currentAccelMeasurement.set(vector, timestampS);
+};
+
+ComplementaryFilter.prototype.addGyroMeasurement = function(vector, timestampS) {
+ this.currentGyroMeasurement.set(vector, timestampS);
+
+ var deltaT = timestampS - this.previousGyroMeasurement.timestampS;
+ if (Util.isTimestampDeltaValid(deltaT)) {
+ this.run_();
+ }
+
+ this.previousGyroMeasurement.copy(this.currentGyroMeasurement);
+};
+
+ComplementaryFilter.prototype.run_ = function() {
+
+ if (!this.isOrientationInitialized) {
+ this.accelQ = this.accelToQuaternion_(this.currentAccelMeasurement.sample);
+ this.previousFilterQ.copy(this.accelQ);
+ this.isOrientationInitialized = true;
+ return;
+ }
+
+ var deltaT = this.currentGyroMeasurement.timestampS -
+ this.previousGyroMeasurement.timestampS;
+
+ // Convert gyro rotation vector to a quaternion delta.
+ var gyroDeltaQ = this.gyroToQuaternionDelta_(this.currentGyroMeasurement.sample, deltaT);
+ this.gyroIntegralQ.multiply(gyroDeltaQ);
+
+ // filter_1 = K * (filter_0 + gyro * dT) + (1 - K) * accel.
+ this.filterQ.copy(this.previousFilterQ);
+ this.filterQ.multiply(gyroDeltaQ);
+
+ // Calculate the delta between the current estimated gravity and the real
+ // gravity vector from accelerometer.
+ var invFilterQ = new MathUtil.Quaternion();
+ invFilterQ.copy(this.filterQ);
+ invFilterQ.inverse();
+
+ this.estimatedGravity.set(0, 0, -1);
+ this.estimatedGravity.applyQuaternion(invFilterQ);
+ this.estimatedGravity.normalize();
+
+ this.measuredGravity.copy(this.currentAccelMeasurement.sample);
+ this.measuredGravity.normalize();
+
+ // Compare estimated gravity with measured gravity, get the delta quaternion
+ // between the two.
+ var deltaQ = new MathUtil.Quaternion();
+ deltaQ.setFromUnitVectors(this.estimatedGravity, this.measuredGravity);
+ deltaQ.inverse();
+
+ if (DEBUG) {
+ console.log('Delta: %d deg, G_est: (%s, %s, %s), G_meas: (%s, %s, %s)',
+ MathUtil.radToDeg * Util.getQuaternionAngle(deltaQ),
+ (this.estimatedGravity.x).toFixed(1),
+ (this.estimatedGravity.y).toFixed(1),
+ (this.estimatedGravity.z).toFixed(1),
+ (this.measuredGravity.x).toFixed(1),
+ (this.measuredGravity.y).toFixed(1),
+ (this.measuredGravity.z).toFixed(1));
+ }
+
+ // Calculate the SLERP target: current orientation plus the measured-estimated
+ // quaternion delta.
+ var targetQ = new MathUtil.Quaternion();
+ targetQ.copy(this.filterQ);
+ targetQ.multiply(deltaQ);
+
+ // SLERP factor: 0 is pure gyro, 1 is pure accel.
+ this.filterQ.slerp(targetQ, 1 - this.kFilter);
+
+ this.previousFilterQ.copy(this.filterQ);
+};
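+
+// For intuition, the same blend in one dimension (a single angle, in radians),
+// as a minimal sketch that is not called at runtime: integrate the gyro rate
+// for the short-term estimate, then pull it toward the accelerometer angle by
+// a factor of (1 - kFilter). The quaternion code above does the equivalent
+// with a SLERP instead of a linear mix.
+function complementaryBlend1D(prevAngle, gyroRate, dt, accelAngle, kFilter) {
+ var gyroAngle = prevAngle + gyroRate * dt; // short-term estimate
+ return kFilter * gyroAngle + (1 - kFilter) * accelAngle; // long-term correction
+}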
+
+ComplementaryFilter.prototype.getOrientation = function() {
+ return this.filterQ;
+};
+
+ComplementaryFilter.prototype.accelToQuaternion_ = function(accel) {
+ var normAccel = new MathUtil.Vector3();
+ normAccel.copy(accel);
+ normAccel.normalize();
+ var quat = new MathUtil.Quaternion();
+ quat.setFromUnitVectors(new MathUtil.Vector3(0, 0, -1), normAccel);
+ quat.inverse();
+ return quat;
+};
+
+ComplementaryFilter.prototype.gyroToQuaternionDelta_ = function(gyro, dt) {
+ // Extract axis and angle from the gyroscope data.
+ var quat = new MathUtil.Quaternion();
+ var axis = new MathUtil.Vector3();
+ axis.copy(gyro);
+ axis.normalize();
+ quat.setFromAxisAngle(axis, gyro.length() * dt);
+ return quat;
+};
+
+
+module.exports = ComplementaryFilter;
+
+},{"../math-util.js":14,"../util.js":22,"./sensor-sample.js":20}],18:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var ComplementaryFilter = _dereq_('./complementary-filter.js');
+var PosePredictor = _dereq_('./pose-predictor.js');
+var TouchPanner = _dereq_('../touch-panner.js');
+var MathUtil = _dereq_('../math-util.js');
+var Util = _dereq_('../util.js');
+
+/**
+ * The pose sensor, implemented using DeviceMotion APIs.
+ */
+function FusionPoseSensor() {
+ this.deviceId = 'webvr-polyfill:fused';
+ this.deviceName = 'VR Position Device (webvr-polyfill:fused)';
+
+ this.accelerometer = new MathUtil.Vector3();
+ this.gyroscope = new MathUtil.Vector3();
+
+ window.addEventListener('devicemotion', this.onDeviceMotionChange_.bind(this));
+ window.addEventListener('orientationchange', this.onScreenOrientationChange_.bind(this));
+
+ this.filter = new ComplementaryFilter(WebVRConfig.K_FILTER);
+ this.posePredictor = new PosePredictor(WebVRConfig.PREDICTION_TIME_S);
+ this.touchPanner = new TouchPanner();
+
+ this.filterToWorldQ = new MathUtil.Quaternion();
+
+ // Set the filter to world transform, depending on OS.
+ if (Util.isIOS()) {
+ this.filterToWorldQ.setFromAxisAngle(new MathUtil.Vector3(1, 0, 0), Math.PI / 2);
+ } else {
+ this.filterToWorldQ.setFromAxisAngle(new MathUtil.Vector3(1, 0, 0), -Math.PI / 2);
+ }
+
+ this.inverseWorldToScreenQ = new MathUtil.Quaternion();
+ this.worldToScreenQ = new MathUtil.Quaternion();
+ this.originalPoseAdjustQ = new MathUtil.Quaternion();
+ this.originalPoseAdjustQ.setFromAxisAngle(new MathUtil.Vector3(0, 0, 1),
+ -window.orientation * Math.PI / 180);
+
+ this.setScreenTransform_();
+ // Adjust this filter for being in landscape mode.
+ if (Util.isLandscapeMode()) {
+ this.filterToWorldQ.multiply(this.inverseWorldToScreenQ);
+ }
+
+ // Keep track of a reset transform for resetSensor.
+ this.resetQ = new MathUtil.Quaternion();
+
+ this.isFirefoxAndroid = Util.isFirefoxAndroid();
+ this.isIOS = Util.isIOS();
+
+ this.orientationOut_ = new Float32Array(4);
+}
+
+FusionPoseSensor.prototype.getPosition = function() {
+ // This PoseSensor doesn't support position.
+ return null;
+};
+
+FusionPoseSensor.prototype.getOrientation = function() {
+ // Convert from filter space to the same system used by the
+ // deviceorientation event.
+ var orientation = this.filter.getOrientation();
+
+ // Predict orientation.
+ this.predictedQ = this.posePredictor.getPrediction(orientation, this.gyroscope, this.previousTimestampS);
+
+ // Convert to THREE coordinate system: -Z forward, Y up, X right.
+ var out = new MathUtil.Quaternion();
+ out.copy(this.filterToWorldQ);
+ out.multiply(this.resetQ);
+ if (!WebVRConfig.TOUCH_PANNER_DISABLED) {
+ out.multiply(this.touchPanner.getOrientation());
+ }
+ out.multiply(this.predictedQ);
+ out.multiply(this.worldToScreenQ);
+
+ // Handle the yaw-only case.
+ if (WebVRConfig.YAW_ONLY) {
+ // Make a quaternion that only turns around the Y-axis.
+ out.x = 0;
+ out.z = 0;
+ out.normalize();
+ }
+
+ this.orientationOut_[0] = out.x;
+ this.orientationOut_[1] = out.y;
+ this.orientationOut_[2] = out.z;
+ this.orientationOut_[3] = out.w;
+ return this.orientationOut_;
+};
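+
+// Reading the multiplication chain in getOrientation from left to right, the
+// returned pose is composed roughly as
+// filterToWorld * reset * touchPan * predicted * worldToScreen:
+// the predicted sensor-space orientation, re-expressed in the page's screen
+// space, with the user's reset and touch-pan adjustments applied.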
+
+FusionPoseSensor.prototype.resetPose = function() {
+ // Reduce to inverted yaw-only.
+ this.resetQ.copy(this.filter.getOrientation());
+ this.resetQ.x = 0;
+ this.resetQ.y = 0;
+ this.resetQ.z *= -1;
+ this.resetQ.normalize();
+
+ // Take into account extra transformations in landscape mode.
+ if (Util.isLandscapeMode()) {
+ this.resetQ.multiply(this.inverseWorldToScreenQ);
+ }
+
+ // Take into account original pose.
+ this.resetQ.multiply(this.originalPoseAdjustQ);
+
+ if (!WebVRConfig.TOUCH_PANNER_DISABLED) {
+ this.touchPanner.resetSensor();
+ }
+};
+
+FusionPoseSensor.prototype.onDeviceMotionChange_ = function(deviceMotion) {
+ var accGravity = deviceMotion.accelerationIncludingGravity;
+ var rotRate = deviceMotion.rotationRate;
+ var timestampS = deviceMotion.timeStamp / 1000;
+
+ // Firefox Android reports timeStamp in microseconds (thousandths of a
+ // millisecond), so divide by 1000 again to get seconds.
+ if (this.isFirefoxAndroid) {
+ timestampS /= 1000;
+ }
+
+ var deltaS = timestampS - this.previousTimestampS;
+ if (deltaS <= Util.MIN_TIMESTEP || deltaS > Util.MAX_TIMESTEP) {
+ console.warn('Invalid timestamps detected. Time step between successive ' +
+ 'gyroscope sensor samples is very small or not monotonic');
+ this.previousTimestampS = timestampS;
+ return;
+ }
+ this.accelerometer.set(-accGravity.x, -accGravity.y, -accGravity.z);
+ this.gyroscope.set(rotRate.alpha, rotRate.beta, rotRate.gamma);
+
+ // With iOS and Firefox Android, rotationRate is reported in degrees,
+ // so we first convert to radians.
+ if (this.isIOS || this.isFirefoxAndroid) {
+ this.gyroscope.multiplyScalar(Math.PI / 180);
+ }
+
+ this.filter.addAccelMeasurement(this.accelerometer, timestampS);
+ this.filter.addGyroMeasurement(this.gyroscope, timestampS);
+
+ this.previousTimestampS = timestampS;
+};
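+
+// Worked unit example for the conversions above (illustrative only): a
+// rotationRate of 90 deg/s on iOS or Firefox Android becomes
+// 90 * Math.PI / 180 ≈ 1.571 rad/s before it is handed to the filter.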
+
+FusionPoseSensor.prototype.onScreenOrientationChange_ =
+ function(screenOrientation) {
+ this.setScreenTransform_();
+};
+
+FusionPoseSensor.prototype.setScreenTransform_ = function() {
+ this.worldToScreenQ.set(0, 0, 0, 1);
+ switch (window.orientation) {
+ case 0:
+ break;
+ case 90:
+ this.worldToScreenQ.setFromAxisAngle(new MathUtil.Vector3(0, 0, 1), -Math.PI / 2);
+ break;
+ case -90:
+ this.worldToScreenQ.setFromAxisAngle(new MathUtil.Vector3(0, 0, 1), Math.PI / 2);
+ break;
+ case 180:
+ // TODO.
+ break;
+ }
+ this.inverseWorldToScreenQ.copy(this.worldToScreenQ);
+ this.inverseWorldToScreenQ.inverse();
+};
+
+module.exports = FusionPoseSensor;
+
+},{"../math-util.js":14,"../touch-panner.js":21,"../util.js":22,"./complementary-filter.js":17,"./pose-predictor.js":19}],19:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var MathUtil = _dereq_('../math-util.js');
+var DEBUG = false;
+
+/**
+ * Given an orientation and the gyroscope data, predicts the future orientation
+ * of the head. This makes rendering appear faster.
+ *
+ * Also see: http://msl.cs.uiuc.edu/~lavalle/papers/LavYerKatAnt14.pdf
+ *
+ * @param {Number} predictionTimeS time from head movement to the appearance of
+ * the corresponding image.
+ */
+function PosePredictor(predictionTimeS) {
+ this.predictionTimeS = predictionTimeS;
+
+ // The quaternion corresponding to the previous state.
+ this.previousQ = new MathUtil.Quaternion();
+ // Previous time a prediction occurred.
+ this.previousTimestampS = null;
+
+ // The delta quaternion that adjusts the current pose.
+ this.deltaQ = new MathUtil.Quaternion();
+ // The output quaternion.
+ this.outQ = new MathUtil.Quaternion();
+}
+
+PosePredictor.prototype.getPrediction = function(currentQ, gyro, timestampS) {
+ if (!this.previousTimestampS) {
+ this.previousQ.copy(currentQ);
+ this.previousTimestampS = timestampS;
+ return currentQ;
+ }
+
+ // Calculate axis and angle based on gyroscope rotation rate data.
+ var axis = new MathUtil.Vector3();
+ axis.copy(gyro);
+ axis.normalize();
+
+ var angularSpeed = gyro.length();
+
+ // If we're rotating slowly, don't do prediction.
+ if (angularSpeed < MathUtil.degToRad * 20) {
+ if (DEBUG) {
+ console.log('Moving slowly, at %s deg/s: no prediction',
+ (MathUtil.radToDeg * angularSpeed).toFixed(1));
+ }
+ this.outQ.copy(currentQ);
+ this.previousQ.copy(currentQ);
+ return this.outQ;
+ }
+
+ // Get the predicted angle based on the time delta and latency.
+ var deltaT = timestampS - this.previousTimestampS;
+ var predictAngle = angularSpeed * this.predictionTimeS;
+
+ this.deltaQ.setFromAxisAngle(axis, predictAngle);
+ this.outQ.copy(this.previousQ);
+ this.outQ.multiply(this.deltaQ);
+
+ this.previousQ.copy(currentQ);
+
+ return this.outQ;
+};
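+
+// Worked example of the extrapolation above (illustrative only): at an
+// angular speed of 90 deg/s (≈ 1.571 rad/s) with predictionTimeS = 0.040 s,
+// predictAngle ≈ 0.063 rad (3.6 degrees) about the current gyro axis.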
+
+
+module.exports = PosePredictor;
+
+},{"../math-util.js":14}],20:[function(_dereq_,module,exports){
+function SensorSample(sample, timestampS) {
+ this.set(sample, timestampS);
+};
+
+SensorSample.prototype.set = function(sample, timestampS) {
+ this.sample = sample;
+ this.timestampS = timestampS;
+};
+
+SensorSample.prototype.copy = function(sensorSample) {
+ this.set(sensorSample.sample, sensorSample.timestampS);
+};
+
+module.exports = SensorSample;
+
+},{}],21:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var MathUtil = _dereq_('./math-util.js');
+var Util = _dereq_('./util.js');
+
+var ROTATE_SPEED = 0.5;
+/**
+ * Provides a quaternion responsible for pre-panning the scene before further
+ * transformations due to device sensors.
+ */
+function TouchPanner() {
+ window.addEventListener('touchstart', this.onTouchStart_.bind(this));
+ window.addEventListener('touchmove', this.onTouchMove_.bind(this));
+ window.addEventListener('touchend', this.onTouchEnd_.bind(this));
+
+ this.isTouching = false;
+ this.rotateStart = new MathUtil.Vector2();
+ this.rotateEnd = new MathUtil.Vector2();
+ this.rotateDelta = new MathUtil.Vector2();
+
+ this.theta = 0;
+ this.orientation = new MathUtil.Quaternion();
+}
+
+TouchPanner.prototype.getOrientation = function() {
+ this.orientation.setFromEulerXYZ(0, 0, this.theta);
+ return this.orientation;
+};
+
+TouchPanner.prototype.resetSensor = function() {
+ this.theta = 0;
+};
+
+TouchPanner.prototype.onTouchStart_ = function(e) {
+ // Only respond if there is exactly one touch.
+ if (e.touches.length != 1) {
+ return;
+ }
+ this.rotateStart.set(e.touches[0].pageX, e.touches[0].pageY);
+ this.isTouching = true;
+};
+
+TouchPanner.prototype.onTouchMove_ = function(e) {
+ if (!this.isTouching) {
+ return;
+ }
+ this.rotateEnd.set(e.touches[0].pageX, e.touches[0].pageY);
+ this.rotateDelta.subVectors(this.rotateEnd, this.rotateStart);
+ this.rotateStart.copy(this.rotateEnd);
+
+ // On iOS, direction is inverted.
+ if (Util.isIOS()) {
+ this.rotateDelta.x *= -1;
+ }
+
+ var element = document.body;
+ this.theta += 2 * Math.PI * this.rotateDelta.x / element.clientWidth * ROTATE_SPEED;
+};
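+
+// With ROTATE_SPEED = 0.5, a drag across the full width of the page pans the
+// scene by 2 * PI * 0.5 = PI radians, i.e. half a turn about the vertical axis.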
+
+TouchPanner.prototype.onTouchEnd_ = function(e) {
+ this.isTouching = false;
+};
+
+module.exports = TouchPanner;
+
+},{"./math-util.js":14,"./util.js":22}],22:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var objectAssign = _dereq_('object-assign');
+
+var Util = window.Util || {};
+
+Util.MIN_TIMESTEP = 0.001;
+Util.MAX_TIMESTEP = 1;
+
+Util.base64 = function(mimeType, base64) {
+ return 'data:' + mimeType + ';base64,' + base64;
+};
+
+Util.clamp = function(value, min, max) {
+ return Math.min(Math.max(min, value), max);
+};
+
+Util.lerp = function(a, b, t) {
+ return a + ((b - a) * t);
+};
+
+Util.isIOS = (function() {
+ var isIOS = /iPad|iPhone|iPod/.test(navigator.platform);
+ return function() {
+ return isIOS;
+ };
+})();
+
+Util.isSafari = (function() {
+ var isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
+ return function() {
+ return isSafari;
+ };
+})();
+
+Util.isFirefoxAndroid = (function() {
+ var isFirefoxAndroid = navigator.userAgent.indexOf('Firefox') !== -1 &&
+ navigator.userAgent.indexOf('Android') !== -1;
+ return function() {
+ return isFirefoxAndroid;
+ };
+})();
+
+Util.isLandscapeMode = function() {
+ return (window.orientation == 90 || window.orientation == -90);
+};
+
+// Helper method to validate the time steps of sensor timestamps.
+Util.isTimestampDeltaValid = function(timestampDeltaS) {
+ if (isNaN(timestampDeltaS)) {
+ return false;
+ }
+ if (timestampDeltaS <= Util.MIN_TIMESTEP) {
+ return false;
+ }
+ if (timestampDeltaS > Util.MAX_TIMESTEP) {
+ return false;
+ }
+ return true;
+};
+
+Util.getScreenWidth = function() {
+ return Math.max(window.screen.width, window.screen.height) *
+ window.devicePixelRatio;
+};
+
+Util.getScreenHeight = function() {
+ return Math.min(window.screen.width, window.screen.height) *
+ window.devicePixelRatio;
+};
+
+Util.requestFullscreen = function(element) {
+ if (element.requestFullscreen) {
+ element.requestFullscreen();
+ } else if (element.webkitRequestFullscreen) {
+ element.webkitRequestFullscreen();
+ } else if (element.mozRequestFullScreen) {
+ element.mozRequestFullScreen();
+ } else if (element.msRequestFullscreen) {
+ element.msRequestFullscreen();
+ } else {
+ return false;
+ }
+
+ return true;
+};
+
+Util.exitFullscreen = function() {
+ if (document.exitFullscreen) {
+ document.exitFullscreen();
+ } else if (document.webkitExitFullscreen) {
+ document.webkitExitFullscreen();
+ } else if (document.mozCancelFullScreen) {
+ document.mozCancelFullScreen();
+ } else if (document.msExitFullscreen) {
+ document.msExitFullscreen();
+ } else {
+ return false;
+ }
+
+ return true;
+};
+
+Util.getFullscreenElement = function() {
+ return document.fullscreenElement ||
+ document.webkitFullscreenElement ||
+ document.mozFullScreenElement ||
+ document.msFullscreenElement;
+};
+
+Util.linkProgram = function(gl, vertexSource, fragmentSource, attribLocationMap) {
+ // No error checking for brevity.
+ var vertexShader = gl.createShader(gl.VERTEX_SHADER);
+ gl.shaderSource(vertexShader, vertexSource);
+ gl.compileShader(vertexShader);
+
+ var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
+ gl.shaderSource(fragmentShader, fragmentSource);
+ gl.compileShader(fragmentShader);
+
+ var program = gl.createProgram();
+ gl.attachShader(program, vertexShader);
+ gl.attachShader(program, fragmentShader);
+
+ for (var attribName in attribLocationMap)
+ gl.bindAttribLocation(program, attribLocationMap[attribName], attribName);
+
+ gl.linkProgram(program);
+
+ gl.deleteShader(vertexShader);
+ gl.deleteShader(fragmentShader);
+
+ return program;
+};
+
+Util.getProgramUniforms = function(gl, program) {
+ var uniforms = {};
+ var uniformCount = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS);
+ var uniformName = '';
+ for (var i = 0; i < uniformCount; i++) {
+ var uniformInfo = gl.getActiveUniform(program, i);
+ uniformName = uniformInfo.name.replace('[0]', '');
+ uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
+ }
+ return uniforms;
+};
+
+Util.orthoMatrix = function (out, left, right, bottom, top, near, far) {
+ var lr = 1 / (left - right),
+ bt = 1 / (bottom - top),
+ nf = 1 / (near - far);
+ out[0] = -2 * lr;
+ out[1] = 0;
+ out[2] = 0;
+ out[3] = 0;
+ out[4] = 0;
+ out[5] = -2 * bt;
+ out[6] = 0;
+ out[7] = 0;
+ out[8] = 0;
+ out[9] = 0;
+ out[10] = 2 * nf;
+ out[11] = 0;
+ out[12] = (left + right) * lr;
+ out[13] = (top + bottom) * bt;
+ out[14] = (far + near) * nf;
+ out[15] = 1;
+ return out;
+};
+
+Util.isMobile = function() {
+ var check = false;
+ (function(a){if(/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test(a)||/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(a.substr(0,4)))check = true})(navigator.userAgent||navigator.vendor||window.opera);
+ return check;
+};
+
+Util.extend = objectAssign;
+
+Util.safariCssSizeWorkaround = function(canvas) {
+ // TODO(smus): Remove this workaround when Safari for iOS is fixed.
+ // iOS only workaround (for https://bugs.webkit.org/show_bug.cgi?id=152556).
+ //
+ // "To the last I grapple with thee;
+ // from hell's heart I stab at thee;
+ // for hate's sake I spit my last breath at thee."
+ // -- Moby Dick, by Herman Melville
+ if (Util.isIOS()) {
+ var width = canvas.style.width;
+ var height = canvas.style.height;
+ canvas.style.width = (parseInt(width) + 1) + 'px';
+ canvas.style.height = (parseInt(height)) + 'px';
+ console.log('Resetting width to...', width);
+ setTimeout(function() {
+ console.log('Done. Width is now', width);
+ canvas.style.width = width;
+ canvas.style.height = height;
+ }, 100);
+ }
+
+ // Debug only.
+ window.Util = Util;
+ window.canvas = canvas;
+};
+
+Util.frameDataFromPose = (function() {
+ var piOver180 = Math.PI / 180.0;
+ var rad45 = Math.PI * 0.25;
+
+ // Borrowed from glMatrix.
+ function mat4_perspectiveFromFieldOfView(out, fov, near, far) {
+ var upTan = Math.tan(fov ? (fov.upDegrees * piOver180) : rad45),
+ downTan = Math.tan(fov ? (fov.downDegrees * piOver180) : rad45),
+ leftTan = Math.tan(fov ? (fov.leftDegrees * piOver180) : rad45),
+ rightTan = Math.tan(fov ? (fov.rightDegrees * piOver180) : rad45),
+ xScale = 2.0 / (leftTan + rightTan),
+ yScale = 2.0 / (upTan + downTan);
+
+ out[0] = xScale;
+ out[1] = 0.0;
+ out[2] = 0.0;
+ out[3] = 0.0;
+ out[4] = 0.0;
+ out[5] = yScale;
+ out[6] = 0.0;
+ out[7] = 0.0;
+ out[8] = -((leftTan - rightTan) * xScale * 0.5);
+ out[9] = ((upTan - downTan) * yScale * 0.5);
+ out[10] = far / (near - far);
+ out[11] = -1.0;
+ out[12] = 0.0;
+ out[13] = 0.0;
+ out[14] = (far * near) / (near - far);
+ out[15] = 0.0;
+ return out;
+ }
+
+ function mat4_fromRotationTranslation(out, q, v) {
+ // Quaternion math
+ var x = q[0], y = q[1], z = q[2], w = q[3],
+ x2 = x + x,
+ y2 = y + y,
+ z2 = z + z,
+
+ xx = x * x2,
+ xy = x * y2,
+ xz = x * z2,
+ yy = y * y2,
+ yz = y * z2,
+ zz = z * z2,
+ wx = w * x2,
+ wy = w * y2,
+ wz = w * z2;
+
+ out[0] = 1 - (yy + zz);
+ out[1] = xy + wz;
+ out[2] = xz - wy;
+ out[3] = 0;
+ out[4] = xy - wz;
+ out[5] = 1 - (xx + zz);
+ out[6] = yz + wx;
+ out[7] = 0;
+ out[8] = xz + wy;
+ out[9] = yz - wx;
+ out[10] = 1 - (xx + yy);
+ out[11] = 0;
+ out[12] = v[0];
+ out[13] = v[1];
+ out[14] = v[2];
+ out[15] = 1;
+
+ return out;
+ };
+
+ function mat4_translate(out, a, v) {
+ var x = v[0], y = v[1], z = v[2],
+ a00, a01, a02, a03,
+ a10, a11, a12, a13,
+ a20, a21, a22, a23;
+
+ if (a === out) {
+ out[12] = a[0] * x + a[4] * y + a[8] * z + a[12];
+ out[13] = a[1] * x + a[5] * y + a[9] * z + a[13];
+ out[14] = a[2] * x + a[6] * y + a[10] * z + a[14];
+ out[15] = a[3] * x + a[7] * y + a[11] * z + a[15];
+ } else {
+ a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
+ a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
+ a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
+
+ out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03;
+ out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13;
+ out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23;
+
+ out[12] = a00 * x + a10 * y + a20 * z + a[12];
+ out[13] = a01 * x + a11 * y + a21 * z + a[13];
+ out[14] = a02 * x + a12 * y + a22 * z + a[14];
+ out[15] = a03 * x + a13 * y + a23 * z + a[15];
+ }
+
+ return out;
+ };
+
+ var mat4_invert = function(out, a) {
+ var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
+ a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
+ a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
+ a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15],
+
+ b00 = a00 * a11 - a01 * a10,
+ b01 = a00 * a12 - a02 * a10,
+ b02 = a00 * a13 - a03 * a10,
+ b03 = a01 * a12 - a02 * a11,
+ b04 = a01 * a13 - a03 * a11,
+ b05 = a02 * a13 - a03 * a12,
+ b06 = a20 * a31 - a21 * a30,
+ b07 = a20 * a32 - a22 * a30,
+ b08 = a20 * a33 - a23 * a30,
+ b09 = a21 * a32 - a22 * a31,
+ b10 = a21 * a33 - a23 * a31,
+ b11 = a22 * a33 - a23 * a32,
+
+ // Calculate the determinant
+ det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;
+
+ if (!det) {
+ return null;
+ }
+ det = 1.0 / det;
+
+ out[0] = (a11 * b11 - a12 * b10 + a13 * b09) * det;
+ out[1] = (a02 * b10 - a01 * b11 - a03 * b09) * det;
+ out[2] = (a31 * b05 - a32 * b04 + a33 * b03) * det;
+ out[3] = (a22 * b04 - a21 * b05 - a23 * b03) * det;
+ out[4] = (a12 * b08 - a10 * b11 - a13 * b07) * det;
+ out[5] = (a00 * b11 - a02 * b08 + a03 * b07) * det;
+ out[6] = (a32 * b02 - a30 * b05 - a33 * b01) * det;
+ out[7] = (a20 * b05 - a22 * b02 + a23 * b01) * det;
+ out[8] = (a10 * b10 - a11 * b08 + a13 * b06) * det;
+ out[9] = (a01 * b08 - a00 * b10 - a03 * b06) * det;
+ out[10] = (a30 * b04 - a31 * b02 + a33 * b00) * det;
+ out[11] = (a21 * b02 - a20 * b04 - a23 * b00) * det;
+ out[12] = (a11 * b07 - a10 * b09 - a12 * b06) * det;
+ out[13] = (a00 * b09 - a01 * b07 + a02 * b06) * det;
+ out[14] = (a31 * b01 - a30 * b03 - a32 * b00) * det;
+ out[15] = (a20 * b03 - a21 * b01 + a22 * b00) * det;
+
+ return out;
+ };
+
+ var defaultOrientation = new Float32Array([0, 0, 0, 1]);
+ var defaultPosition = new Float32Array([0, 0, 0]);
+
+ function updateEyeMatrices(projection, view, pose, parameters, vrDisplay) {
+ mat4_perspectiveFromFieldOfView(projection, parameters ? parameters.fieldOfView : null, vrDisplay.depthNear, vrDisplay.depthFar);
+
+ var orientation = pose.orientation || defaultOrientation;
+ var position = pose.position || defaultPosition;
+
+ mat4_fromRotationTranslation(view, orientation, position);
+ if (parameters)
+ mat4_translate(view, view, parameters.offset);
+ mat4_invert(view, view);
+ }
+
+ return function(frameData, pose, vrDisplay) {
+ if (!frameData || !pose)
+ return false;
+
+ frameData.pose = pose;
+ frameData.timestamp = pose.timestamp;
+
+ updateEyeMatrices(
+ frameData.leftProjectionMatrix, frameData.leftViewMatrix,
+ pose, vrDisplay.getEyeParameters("left"), vrDisplay);
+ updateEyeMatrices(
+ frameData.rightProjectionMatrix, frameData.rightViewMatrix,
+ pose, vrDisplay.getEyeParameters("right"), vrDisplay);
+
+ return true;
+ };
+})();
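+
+// Illustrative use (caller names assumed, not defined in this file): a
+// polyfilled display's getFrameData(frameData) can fill all four projection
+// and view matrices from the current pose with
+// Util.frameDataFromPose(frameData, display.getPose(), display);
+// where `display` exposes depthNear, depthFar and getEyeParameters().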
+
+module.exports = Util;
+
+},{"object-assign":1}],23:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var Emitter = _dereq_('./emitter.js');
+var Util = _dereq_('./util.js');
+var DeviceInfo = _dereq_('./device-info.js');
+
+var DEFAULT_VIEWER = 'CardboardV1';
+var VIEWER_KEY = 'WEBVR_CARDBOARD_VIEWER';
+var CLASS_NAME = 'webvr-polyfill-viewer-selector';
+
+/**
+ * Creates a viewer selector with the options specified. Supports being shown
+ * and hidden. Generates events when viewer parameters change. Also supports
+ * saving the currently selected viewer key in localStorage.
+ */
+function ViewerSelector() {
+ // Try to load the selected key from local storage. If none exists, use the
+ // default key.
+ try {
+ this.selectedKey = localStorage.getItem(VIEWER_KEY) || DEFAULT_VIEWER;
+ } catch (error) {
+ console.error('Failed to load viewer profile: %s', error);
+ }
+ this.dialog = this.createDialog_(DeviceInfo.Viewers);
+ this.root = null;
+}
+ViewerSelector.prototype = new Emitter();
+
+ViewerSelector.prototype.show = function(root) {
+ this.root = root;
+
+ root.appendChild(this.dialog);
+ //console.log('ViewerSelector.show');
+
+ // Ensure the currently selected item is checked.
+ var selected = this.dialog.querySelector('#' + this.selectedKey);
+ selected.checked = true;
+
+ // Show the UI.
+ this.dialog.style.display = 'block';
+};
+
+ViewerSelector.prototype.hide = function() {
+ if (this.root && this.root.contains(this.dialog)) {
+ this.root.removeChild(this.dialog);
+ }
+ //console.log('ViewerSelector.hide');
+ this.dialog.style.display = 'none';
+};
+
+ViewerSelector.prototype.getCurrentViewer = function() {
+ return DeviceInfo.Viewers[this.selectedKey];
+};
+
+ViewerSelector.prototype.getSelectedKey_ = function() {
+ var input = this.dialog.querySelector('input[name=field]:checked');
+ if (input) {
+ return input.id;
+ }
+ return null;
+};
+
+ViewerSelector.prototype.onSave_ = function() {
+ this.selectedKey = this.getSelectedKey_();
+ if (!this.selectedKey || !DeviceInfo.Viewers[this.selectedKey]) {
+ console.error('ViewerSelector.onSave_: this should never happen!');
+ return;
+ }
+
+ this.emit('change', DeviceInfo.Viewers[this.selectedKey]);
+
+ // Attempt to save the viewer profile; this can fail in private browsing mode.
+ try {
+ localStorage.setItem(VIEWER_KEY, this.selectedKey);
+ } catch(error) {
+ console.error('Failed to save viewer profile: %s', error);
+ }
+ this.hide();
+};
+
+/**
+ * Creates the dialog.
+ */
+ViewerSelector.prototype.createDialog_ = function(options) {
+ var container = document.createElement('div');
+ container.classList.add(CLASS_NAME);
+ container.style.display = 'none';
+ // Create an overlay that dims the background and is dismissed when tapped.
+ var overlay = document.createElement('div');
+ var s = overlay.style;
+ s.position = 'fixed';
+ s.left = 0;
+ s.top = 0;
+ s.width = '100%';
+ s.height = '100%';
+ s.background = 'rgba(0, 0, 0, 0.3)';
+ overlay.addEventListener('click', this.hide.bind(this));
+
+ var width = 280;
+ var dialog = document.createElement('div');
+ var s = dialog.style;
+ s.boxSizing = 'border-box';
+ s.position = 'fixed';
+ s.top = '24px';
+ s.left = '50%';
+ s.marginLeft = (-width/2) + 'px';
+ s.width = width + 'px';
+ s.padding = '24px';
+ s.overflow = 'hidden';
+ s.background = '#fafafa';
+ s.fontFamily = "'Roboto', sans-serif";
+ s.boxShadow = '0px 5px 20px #666';
+
+ dialog.appendChild(this.createH1_('Select your viewer'));
+ for (var id in options) {
+ dialog.appendChild(this.createChoice_(id, options[id].label));
+ }
+ dialog.appendChild(this.createButton_('Save', this.onSave_.bind(this)));
+
+ container.appendChild(overlay);
+ container.appendChild(dialog);
+
+ return container;
+};
+
+ViewerSelector.prototype.createH1_ = function(name) {
+ var h1 = document.createElement('h1');
+ var s = h1.style;
+ s.color = 'black';
+ s.fontSize = '20px';
+ s.fontWeight = 'bold';
+ s.marginTop = 0;
+ s.marginBottom = '24px';
+ h1.innerHTML = name;
+ return h1;
+};
+
+ViewerSelector.prototype.createChoice_ = function(id, name) {
+ /*
+ <div class="choice">
+ <input id="v1" type="radio" name="field" value="v1">
+ <label for="v1">Cardboard V1</label>
+ </div>
+ */
+ var div = document.createElement('div');
+ div.style.marginTop = '8px';
+ div.style.color = 'black';
+
+ var input = document.createElement('input');
+ input.style.fontSize = '30px';
+ input.setAttribute('id', id);
+ input.setAttribute('type', 'radio');
+ input.setAttribute('value', id);
+ input.setAttribute('name', 'field');
+
+ var label = document.createElement('label');
+ label.style.marginLeft = '4px';
+ label.setAttribute('for', id);
+ label.innerHTML = name;
+
+ div.appendChild(input);
+ div.appendChild(label);
+
+ return div;
+};
+
+ViewerSelector.prototype.createButton_ = function(label, onclick) {
+ var button = document.createElement('button');
+ button.innerHTML = label;
+ var s = button.style;
+ s.float = 'right';
+ s.textTransform = 'uppercase';
+ s.color = '#1094f7';
+ s.fontSize = '14px';
+ s.letterSpacing = 0;
+ s.border = 0;
+ s.background = 'none';
+ s.marginTop = '16px';
+
+ button.addEventListener('click', onclick);
+
+ return button;
+};
+
+module.exports = ViewerSelector;
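+
+// Illustrative wiring (assumed; the actual caller lives elsewhere in the
+// polyfill, and on() is assumed to come from the shared Emitter base):
+// var selector = new ViewerSelector();
+// selector.on('change', function(viewer) { /* update distortion params */ });
+// selector.show(document.body);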
+
+},{"./device-info.js":7,"./emitter.js":12,"./util.js":22}],24:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var Util = _dereq_('./util.js');
+
+/**
+ * Android and iOS compatible wakelock implementation.
+ *
+ * Refactored thanks to dkovalev@.
+ */
+function AndroidWakeLock() {
+ var video = document.createElement('video');
+
+ video.addEventListener('ended', function() {
+ video.play();
+ });
+
+ this.request = function() {
+ if (video.paused) {
+ // Base64 version of videos_src/no-sleep-120s.mp4.
+ video.src = Util.base64('video/mp4', 'AAAAGGZ0eXBpc29tAAAAAG1wNDFhdmMxAAAIA21vb3YAAABsbXZoZAAAAADSa9v60mvb+gABX5AAlw/gAAEAAAEAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAdkdHJhawAAAFx0a2hkAAAAAdJr2/rSa9v6AAAAAQAAAAAAlw/gAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAQAAAAHAAAAAAAJGVkdHMAAAAcZWxzdAAAAAAAAAABAJcP4AAAAAAAAQAAAAAG3G1kaWEAAAAgbWRoZAAAAADSa9v60mvb+gAPQkAGjneAFccAAAAAAC1oZGxyAAAAAAAAAAB2aWRlAAAAAAAAAAAAAAAAVmlkZW9IYW5kbGVyAAAABodtaW5mAAAAFHZtaGQAAAABAAAAAAAAAAAAAAAkZGluZgAAABxkcmVmAAAAAAAAAAEAAAAMdXJsIAAAAAEAAAZHc3RibAAAAJdzdHNkAAAAAAAAAAEAAACHYXZjMQAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAMABwASAAAAEgAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABj//wAAADFhdmNDAWQAC//hABlnZAALrNlfllw4QAAAAwBAAAADAKPFCmWAAQAFaOvssiwAAAAYc3R0cwAAAAAAAAABAAAAbgAPQkAAAAAUc3RzcwAAAAAAAAABAAAAAQAAA4BjdHRzAAAAAAAAAG4AAAABAD0JAAAAAAEAehIAAAAAAQA9CQAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEATEtAAAAAAQAehIAAAAABAAAAAAAAAAEAD0JAAAAAAQBMS0AAAAABAB6EgAAAAAEAAAAAAAAAAQAPQkAAAAABAExLQAAAAAEAHoSAAAAAAQAAAAAAAAABAA9CQAAAAAEALcbAAAAAHHN0c2MAAAAAAAAAAQAAAAEAAABuAAAAAQAAAcxzdHN6AAAAAAAAAAAAAABuAAADCQAAABgAAAAOAAAADgAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABIAAAAOAAAADAAAAAwAAAASAAAADgAAAAwAAAAMAAAAEgAAAA4AAAAMAAAADAAAABMAAAAUc3RjbwAAAAAAAAABAAAIKwAAACt1ZHRhAAAAI6llbmMAFwAAdmxjIDIuMi4xIHN0cmVhbSBvdXRwdXQAAAAId2lkZQAACRRtZGF0AAACrgX//6vcRem95tlIt5Ys2CDZI+7veDI2NCAtIGNvcmUgMTQyIC0gSC4yNjQvTVBFRy00IEFWQyBjb2RlYyAtIENvcHlsZWZ0IDIwMDMtMjAxNCAtIGh0dHA6Ly93d3cudmlkZW9sYW4ub3JnL3gyNjQuaHRtbCAtIG9wdGlvbnM6IGNhYmFjPTEgcmVmPTMgZGVibG9jaz0xOjA6MCBhbmFseXNlPTB4MzoweDEzIG1lPWhleCBzdWJtZT03IHBzeT0xIHBzeV9yZD0xLjAwOjAuMDAgbWl4ZWRfcmVmPTEgbWVfcmFuZ2U9MTYgY2hyb21hX21lPTEgdHJlbGxpcz0xIDh4OGRjdD0xIGNxbT0wIGRlYWR6b25lPTIxLDExIGZhc3RfcHNraXA9MSBjaHJvbWFfcXBfb2Zmc2V0PS0yIHRocmVhZHM9MTIgbG9va2FoZWFkX3RocmVhZHM9MSBzbGljZWRfdGhyZWFkcz0wIG5yPTAgZGVjaW1hdGU9MSBpbnRlcmxhY2VkPTAgYmx1cmF5X2NvbXBhdD0wIGNvbnN0cmFpbmVkX2ludHJhPTAgYmZyYW1lcz0zIGJfcHlyYW1pZD0yIGJfYWRhcHQ9MSBiX2JpYXM9MCBkaXJlY3Q9MSB3ZWlnaHRiPTEgb3Blbl9nb3A9MCB3ZWlnaHRwPTIga2V5aW50PTI1MCBrZXlpbnRfbWluPTEgc2N
lbmVjdXQ9NDAgaW50cmFfcmVmcmVzaD0wIHJjX2xvb2thaGVhZD00MCByYz1hYnIgbWJ0cmVlPTEgYml0cmF0ZT0xMDAgcmF0ZXRvbD0xLjAgcWNvbXA9MC42MCBxcG1pbj0xMCBxcG1heD01MSBxcHN0ZXA9NCBpcF9yYXRpbz0xLjQwIGFxPTE6MS4wMACAAAAAU2WIhAAQ/8ltlOe+cTZuGkKg+aRtuivcDZ0pBsfsEi9p/i1yU9DxS2lq4dXTinViF1URBKXgnzKBd/Uh1bkhHtMrwrRcOJslD01UB+fyaL6ef+DBAAAAFEGaJGxBD5B+v+a+4QqF3MgBXz9MAAAACkGeQniH/+94r6EAAAAKAZ5hdEN/8QytwAAAAAgBnmNqQ3/EgQAAAA5BmmhJqEFomUwIIf/+4QAAAApBnoZFESw//76BAAAACAGepXRDf8SBAAAACAGep2pDf8SAAAAADkGarEmoQWyZTAgh//7gAAAACkGeykUVLD//voEAAAAIAZ7pdEN/xIAAAAAIAZ7rakN/xIAAAAAOQZrwSahBbJlMCCH//uEAAAAKQZ8ORRUsP/++gQAAAAgBny10Q3/EgQAAAAgBny9qQ3/EgAAAAA5BmzRJqEFsmUwIIf/+4AAAAApBn1JFFSw//76BAAAACAGfcXRDf8SAAAAACAGfc2pDf8SAAAAADkGbeEmoQWyZTAgh//7hAAAACkGflkUVLD//voAAAAAIAZ+1dEN/xIEAAAAIAZ+3akN/xIEAAAAOQZu8SahBbJlMCCH//uAAAAAKQZ/aRRUsP/++gQAAAAgBn/l0Q3/EgAAAAAgBn/tqQ3/EgQAAAA5Bm+BJqEFsmUwIIf/+4QAAAApBnh5FFSw//76AAAAACAGePXRDf8SAAAAACAGeP2pDf8SBAAAADkGaJEmoQWyZTAgh//7gAAAACkGeQkUVLD//voEAAAAIAZ5hdEN/xIAAAAAIAZ5jakN/xIEAAAAOQZpoSahBbJlMCCH//uEAAAAKQZ6GRRUsP/++gQAAAAgBnqV0Q3/EgQAAAAgBnqdqQ3/EgAAAAA5BmqxJqEFsmUwIIf/+4AAAAApBnspFFSw//76BAAAACAGe6XRDf8SAAAAACAGe62pDf8SAAAAADkGa8EmoQWyZTAgh//7hAAAACkGfDkUVLD//voEAAAAIAZ8tdEN/xIEAAAAIAZ8vakN/xIAAAAAOQZs0SahBbJlMCCH//uAAAAAKQZ9SRRUsP/++gQAAAAgBn3F0Q3/EgAAAAAgBn3NqQ3/EgAAAAA5Bm3hJqEFsmUwIIf/+4QAAAApBn5ZFFSw//76AAAAACAGftXRDf8SBAAAACAGft2pDf8SBAAAADkGbvEmoQWyZTAgh//7gAAAACkGf2kUVLD//voEAAAAIAZ/5dEN/xIAAAAAIAZ/7akN/xIEAAAAOQZvgSahBbJlMCCH//uEAAAAKQZ4eRRUsP/++gAAAAAgBnj10Q3/EgAAAAAgBnj9qQ3/EgQAAAA5BmiRJqEFsmUwIIf/+4AAAAApBnkJFFSw//76BAAAACAGeYXRDf8SAAAAACAGeY2pDf8SBAAAADkGaaEmoQWyZTAgh//7hAAAACkGehkUVLD//voEAAAAIAZ6ldEN/xIEAAAAIAZ6nakN/xIAAAAAOQZqsSahBbJlMCCH//uAAAAAKQZ7KRRUsP/++gQAAAAgBnul0Q3/EgAAAAAgBnutqQ3/EgAAAAA5BmvBJqEFsmUwIIf/+4QAAAApBnw5FFSw//76BAAAACAGfLXRDf8SBAAAACAGfL2pDf8SAAAAADkGbNEmoQWyZTAgh//7gAAAACkGfUkUVLD//voEAAAAIAZ9xdEN/xIAAAAAIAZ9zakN/xIAAAAAOQZt4SahBbJlMCCH//uEAAAAKQZ+WRRUsP/++gAAAAAgBn7V0Q3/EgQAAAAgBn7dqQ3/EgQAAAA5Bm7xJqEFsmUwIIf/+4AAAAApBn9pFFSw//76BAAAACAGf+XRDf8SAAAAACAGf+2pDf8SBAAAADkGb4EmoQWyZTAgh//7hAAAACkGeHkUVLD//voAAAAAIAZ49dEN/xIAAAAAIAZ4/akN/xIEAAAAOQZokSahBbJlMCCH//uAAAAAKQZ5CRRUsP/++gQAAAAgBnmF0Q3/EgAAAAAgBnmNqQ3/EgQAAAA5BmmhJqEFsmUwIIf/+4QAAAApBnoZFFSw//76BAAAACAGepXRDf8SBAAAACAGep2pDf8SAAAAADkGarEmoQWyZTAgh//7gAAAACkGeykUVLD//voEAAAAIAZ7pdEN/xIAAAAAIAZ7rakN/xIAAAAAPQZruSahBbJlMFEw3//7B');
+ video.play();
+ }
+ };
+
+ this.release = function() {
+ video.pause();
+ video.src = '';
+ };
+}
+
+function iOSWakeLock() {
+ var timer = null;
+
+ this.request = function() {
+ if (!timer) {
+ timer = setInterval(function() {
+ window.location = window.location;
+ setTimeout(window.stop, 0);
+ }, 30000);
+ }
+ }
+
+ this.release = function() {
+ if (timer) {
+ clearInterval(timer);
+ timer = null;
+ }
+ }
+}
+
+
+function getWakeLock() {
+ var userAgent = navigator.userAgent || navigator.vendor || window.opera;
+ if (userAgent.match(/iPhone/i) || userAgent.match(/iPod/i)) {
+ return iOSWakeLock;
+ } else {
+ return AndroidWakeLock;
+ }
+}
+
+module.exports = getWakeLock();
+},{"./util.js":22}],25:[function(_dereq_,module,exports){
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var Util = _dereq_('./util.js');
+var CardboardVRDisplay = _dereq_('./cardboard-vr-display.js');
+var MouseKeyboardVRDisplay = _dereq_('./mouse-keyboard-vr-display.js');
+// Uncomment to add positional tracking via webcam.
+//var WebcamPositionSensorVRDevice = require('./webcam-position-sensor-vr-device.js');
+var VRDisplay = _dereq_('./base.js').VRDisplay;
+var VRFrameData = _dereq_('./base.js').VRFrameData;
+var HMDVRDevice = _dereq_('./base.js').HMDVRDevice;
+var PositionSensorVRDevice = _dereq_('./base.js').PositionSensorVRDevice;
+var VRDisplayHMDDevice = _dereq_('./display-wrappers.js').VRDisplayHMDDevice;
+var VRDisplayPositionSensorDevice = _dereq_('./display-wrappers.js').VRDisplayPositionSensorDevice;
+
+function WebVRPolyfill() {
+ this.displays = [];
+ this.devices = []; // For deprecated objects
+ this.devicesPopulated = false;
+ this.nativeWebVRAvailable = this.isWebVRAvailable();
+ this.nativeLegacyWebVRAvailable = this.isDeprecatedWebVRAvailable();
+
+ if (!this.nativeLegacyWebVRAvailable) {
+ if (!this.nativeWebVRAvailable) {
+ this.enablePolyfill();
+ }
+ if (WebVRConfig.ENABLE_DEPRECATED_API) {
+ this.enableDeprecatedPolyfill();
+ }
+ }
+
+ // Put a shim in place to update the API to 1.1 if needed.
+ InstallWebVRSpecShim();
+}
+
+WebVRPolyfill.prototype.isWebVRAvailable = function() {
+ return ('getVRDisplays' in navigator);
+};
+
+WebVRPolyfill.prototype.isDeprecatedWebVRAvailable = function() {
+ return ('getVRDevices' in navigator) || ('mozGetVRDevices' in navigator);
+};
+
+WebVRPolyfill.prototype.populateDevices = function() {
+ if (this.devicesPopulated) {
+ return;
+ }
+
+ // Initialize our virtual VR devices.
+ var vrDisplay = null;
+
+ // Add a Cardboard VRDisplay on compatible mobile devices
+ if (this.isCardboardCompatible()) {
+ vrDisplay = new CardboardVRDisplay();
+ this.displays.push(vrDisplay);
+
+ // For backwards compatibility
+ if (WebVRConfig.ENABLE_DEPRECATED_API) {
+ this.devices.push(new VRDisplayHMDDevice(vrDisplay));
+ this.devices.push(new VRDisplayPositionSensorDevice(vrDisplay));
+ }
+ }
+
+ // Add a Mouse and Keyboard driven VRDisplay for desktops/laptops
+ if (!this.isMobile() && !WebVRConfig.MOUSE_KEYBOARD_CONTROLS_DISABLED) {
+ vrDisplay = new MouseKeyboardVRDisplay();
+ this.displays.push(vrDisplay);
+
+ // For backwards compatibility
+ if (WebVRConfig.ENABLE_DEPRECATED_API) {
+ this.devices.push(new VRDisplayHMDDevice(vrDisplay));
+ this.devices.push(new VRDisplayPositionSensorDevice(vrDisplay));
+ }
+ }
+
+ // Uncomment to add positional tracking via webcam.
+ //if (!this.isMobile() && WebVRConfig.ENABLE_DEPRECATED_API) {
+ // positionDevice = new WebcamPositionSensorVRDevice();
+ // this.devices.push(positionDevice);
+ //}
+
+ this.devicesPopulated = true;
+};
+
+WebVRPolyfill.prototype.enablePolyfill = function() {
+ // Provide navigator.getVRDisplays.
+ navigator.getVRDisplays = this.getVRDisplays.bind(this);
+
+ // Provide the VRDisplay object.
+ window.VRDisplay = VRDisplay;
+ // Provide the VRFrameData object.
+ window.VRFrameData = VRFrameData;
+};
+
+WebVRPolyfill.prototype.enableDeprecatedPolyfill = function() {
+ // Provide navigator.getVRDevices.
+ navigator.getVRDevices = this.getVRDevices.bind(this);
+
+ // Provide the CardboardHMDVRDevice and PositionSensorVRDevice objects.
+ window.HMDVRDevice = HMDVRDevice;
+ window.PositionSensorVRDevice = PositionSensorVRDevice;
+};
+
+WebVRPolyfill.prototype.getVRDisplays = function() {
+ this.populateDevices();
+ var displays = this.displays;
+ return new Promise(function(resolve, reject) {
+ try {
+ resolve(displays);
+ } catch (e) {
+ reject(e);
+ }
+ });
+};
+
+WebVRPolyfill.prototype.getVRDevices = function() {
+ console.warn('getVRDevices is deprecated. Please update your code to use getVRDisplays instead.');
+ var self = this;
+ return new Promise(function(resolve, reject) {
+ try {
+ if (!self.devicesPopulated) {
+ if (self.nativeWebVRAvailable) {
+ return navigator.getVRDisplays(function(displays) {
+ for (var i = 0; i < displays.length; ++i) {
+ self.devices.push(new VRDisplayHMDDevice(displays[i]));
+ self.devices.push(new VRDisplayPositionSensorDevice(displays[i]));
+ }
+ self.devicesPopulated = true;
+ resolve(self.devices);
+ }, reject);
+ }
+
+ if (self.nativeLegacyWebVRAvailable) {
+          return (navigator.getVRDevices || navigator.mozGetVRDevices)(function(devices) {
+ for (var i = 0; i < devices.length; ++i) {
+ if (devices[i] instanceof HMDVRDevice) {
+ self.devices.push(devices[i]);
+ }
+ if (devices[i] instanceof PositionSensorVRDevice) {
+ self.devices.push(devices[i]);
+ }
+ }
+ self.devicesPopulated = true;
+ resolve(self.devices);
+ }, reject);
+ }
+ }
+
+ self.populateDevices();
+ resolve(self.devices);
+ } catch (e) {
+ reject(e);
+ }
+ });
+};
+
+/**
+ * Determine if a device is mobile.
+ */
+WebVRPolyfill.prototype.isMobile = function() {
+ return /Android/i.test(navigator.userAgent) ||
+ /iPhone|iPad|iPod/i.test(navigator.userAgent);
+};
+
+WebVRPolyfill.prototype.isCardboardCompatible = function() {
+ // For now, support all iOS and Android devices.
+ // Also enable the WebVRConfig.FORCE_VR flag for debugging.
+ return this.isMobile() || WebVRConfig.FORCE_ENABLE_VR;
+};
+
+// Installs a shim that updates a WebVR 1.0 spec implementation to WebVR 1.1
+function InstallWebVRSpecShim() {
+ if ('VRDisplay' in window && !('VRFrameData' in window)) {
+ // Provide the VRFrameData object.
+ window.VRFrameData = VRFrameData;
+
+ // A lot of Chrome builds don't have depthNear and depthFar, even
+ // though they're in the WebVR 1.0 spec. Patch them in if they're not present.
+ if(!('depthNear' in window.VRDisplay.prototype)) {
+ window.VRDisplay.prototype.depthNear = 0.01;
+ }
+
+ if(!('depthFar' in window.VRDisplay.prototype)) {
+ window.VRDisplay.prototype.depthFar = 10000.0;
+ }
+
+ window.VRDisplay.prototype.getFrameData = function(frameData) {
+ return Util.frameDataFromPose(frameData, this.getPose(), this);
+ }
+ }
+};
+
+module.exports.WebVRPolyfill = WebVRPolyfill;
+module.exports.InstallWebVRSpecShim = InstallWebVRSpecShim;
+
+},{"./base.js":2,"./cardboard-vr-display.js":5,"./display-wrappers.js":8,"./mouse-keyboard-vr-display.js":15,"./util.js":22}]},{},[13]);
diff --git a/tests/html/webvr/js/third-party/wglu/wglu-debug-geometry.js b/tests/html/webvr/js/third-party/wglu/wglu-debug-geometry.js
new file mode 100644
index 00000000000..3f48e1aad78
--- /dev/null
+++ b/tests/html/webvr/js/third-party/wglu/wglu-debug-geometry.js
@@ -0,0 +1,270 @@
+/*
+Copyright (c) 2016, Brandon Jones.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+var WGLUDebugGeometry = (function() {
+
+ "use strict";
+
+ var debugGeomVS = [
+ "uniform mat4 projectionMat;",
+ "uniform mat4 viewMat;",
+ "uniform mat4 modelMat;",
+ "attribute vec3 position;",
+
+ "void main() {",
+ " gl_Position = projectionMat * viewMat * modelMat * vec4( position, 1.0 );",
+ "}",
+ ].join("\n");
+
+ var debugGeomFS = [
+ "precision mediump float;",
+ "uniform vec4 color;",
+
+ "void main() {",
+ " gl_FragColor = color;",
+ "}",
+ ].join("\n");
+
+ var DebugGeometry = function(gl) {
+ this.gl = gl;
+
+ this.projMat = mat4.create();
+ this.viewMat = mat4.create();
+ this.modelMat = mat4.create();
+
+ this.program = new WGLUProgram(gl);
+ this.program.attachShaderSource(debugGeomVS, gl.VERTEX_SHADER);
+ this.program.attachShaderSource(debugGeomFS, gl.FRAGMENT_SHADER);
+ this.program.bindAttribLocation({ position: 0 });
+ this.program.link();
+
+ var verts = [];
+ var indices = [];
+
+ //
+ // Cube Geometry
+ //
+ this.cubeIndexOffset = indices.length;
+
+ var size = 0.5;
+ // Bottom
+ var idx = verts.length / 3.0;
+ indices.push(idx, idx+1, idx+2);
+ indices.push(idx, idx+2, idx+3);
+
+ verts.push(-size, -size, -size);
+ verts.push(+size, -size, -size);
+ verts.push(+size, -size, +size);
+ verts.push(-size, -size, +size);
+
+ // Top
+ idx = verts.length / 3.0;
+ indices.push(idx, idx+2, idx+1);
+ indices.push(idx, idx+3, idx+2);
+
+ verts.push(-size, +size, -size);
+ verts.push(+size, +size, -size);
+ verts.push(+size, +size, +size);
+ verts.push(-size, +size, +size);
+
+ // Left
+ idx = verts.length / 3.0;
+ indices.push(idx, idx+2, idx+1);
+ indices.push(idx, idx+3, idx+2);
+
+ verts.push(-size, -size, -size);
+ verts.push(-size, +size, -size);
+ verts.push(-size, +size, +size);
+ verts.push(-size, -size, +size);
+
+ // Right
+ idx = verts.length / 3.0;
+ indices.push(idx, idx+1, idx+2);
+ indices.push(idx, idx+2, idx+3);
+
+ verts.push(+size, -size, -size);
+ verts.push(+size, +size, -size);
+ verts.push(+size, +size, +size);
+ verts.push(+size, -size, +size);
+
+ // Back
+ idx = verts.length / 3.0;
+ indices.push(idx, idx+2, idx+1);
+ indices.push(idx, idx+3, idx+2);
+
+ verts.push(-size, -size, -size);
+ verts.push(+size, -size, -size);
+ verts.push(+size, +size, -size);
+ verts.push(-size, +size, -size);
+
+ // Front
+ idx = verts.length / 3.0;
+ indices.push(idx, idx+1, idx+2);
+ indices.push(idx, idx+2, idx+3);
+
+ verts.push(-size, -size, +size);
+ verts.push(+size, -size, +size);
+ verts.push(+size, +size, +size);
+ verts.push(-size, +size, +size);
+
+ this.cubeIndexCount = indices.length - this.cubeIndexOffset;
+
+ //
+ // Cone Geometry
+ //
+ this.coneIndexOffset = indices.length;
+
+ var size = 0.5;
+ var conePointVertex = verts.length / 3.0;
+ var coneBaseVertex = conePointVertex+1;
+ var coneSegments = 16;
+
+ // Point
+ verts.push(0, size, 0);
+
+ // Base Vertices
+ for (var i = 0; i < coneSegments; ++i) {
+ if (i > 0) {
+ idx = verts.length / 3.0;
+ indices.push(idx-1, conePointVertex, idx);
+ }
+
+ var rad = ((Math.PI * 2) / coneSegments) * i;
+ verts.push(Math.sin(rad) * (size / 2.0), -size, Math.cos(rad) * (size / 2.0));
+ }
+
+ // Last triangle to fill the gap
+ indices.push(idx, conePointVertex, coneBaseVertex);
+
+ // Base triangles
+ for (var i = 2; i < coneSegments; ++i) {
+ indices.push(coneBaseVertex, coneBaseVertex+(i-1), coneBaseVertex+i);
+ }
+
+ this.coneIndexCount = indices.length - this.coneIndexOffset;
+
+ //
+ // Rect geometry
+ //
+ this.rectIndexOffset = indices.length;
+
+ idx = verts.length / 3.0;
+ indices.push(idx, idx+1, idx+2, idx+3, idx);
+
+ verts.push(0, 0, 0);
+ verts.push(1, 0, 0);
+ verts.push(1, 1, 0);
+ verts.push(0, 1, 0);
+
+ this.rectIndexCount = indices.length - this.rectIndexOffset;
+
+ this.vertBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
+
+ this.indexBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
+ };
+
+ DebugGeometry.prototype.bind = function(projectionMat, viewMat) {
+ var gl = this.gl;
+ var program = this.program;
+
+ program.use();
+
+ gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
+ gl.uniformMatrix4fv(program.uniform.viewMat, false, viewMat);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+
+ gl.enableVertexAttribArray(program.attrib.position);
+
+ gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 12, 0);
+ };
+
+ DebugGeometry.prototype.bindOrtho = function() {
+ mat4.ortho(this.projMat, 0, this.gl.canvas.width, this.gl.canvas.height, 0, 0.1, 1024);
+ mat4.identity(this.viewMat);
+ this.bind(this.projMat, this.viewMat);
+ };
+
+ DebugGeometry.prototype._bindUniforms = function(orientation, position, scale, color) {
+ if (!position) { position = [0, 0, 0]; }
+ if (!orientation) { orientation = [0, 0, 0, 1]; }
+ if (!scale) { scale = [1, 1, 1]; }
+ if (!color) { color = [1, 0, 0, 1]; }
+
+ mat4.fromRotationTranslationScale(this.modelMat, orientation, position, scale);
+ this.gl.uniformMatrix4fv(this.program.uniform.modelMat, false, this.modelMat);
+ this.gl.uniform4fv(this.program.uniform.color, color);
+ };
+
+ DebugGeometry.prototype.drawCube = function(orientation, position, size, color) {
+ var gl = this.gl;
+
+ if (!size) { size = 1; }
+ this._bindUniforms(orientation, position, [size, size, size], color);
+ gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
+ };
+
+ DebugGeometry.prototype.drawBox = function(orientation, position, scale, color) {
+ var gl = this.gl;
+
+ this._bindUniforms(orientation, position, scale, color);
+ gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
+ };
+
+ DebugGeometry.prototype.drawBoxWithMatrix = function(mat, color) {
+ var gl = this.gl;
+
+ gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
+ gl.uniform4fv(this.program.uniform.color, color);
+ gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
+ };
+
+ DebugGeometry.prototype.drawRect = function(x, y, width, height, color) {
+ var gl = this.gl;
+
+ this._bindUniforms(null, [x, y, -1], [width, height, 1], color);
+ gl.drawElements(gl.LINE_STRIP, this.rectIndexCount, gl.UNSIGNED_SHORT, this.rectIndexOffset * 2.0);
+ };
+
+ DebugGeometry.prototype.drawCone = function(orientation, position, size, color) {
+ var gl = this.gl;
+
+ if (!size) { size = 1; }
+ this._bindUniforms(orientation, position, [size, size, size], color);
+ gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
+ };
+
+ DebugGeometry.prototype.drawConeWithMatrix = function(mat, color) {
+ var gl = this.gl;
+
+ gl.uniformMatrix4fv(this.program.uniform.modelMat, false, mat);
+ gl.uniform4fv(this.program.uniform.color, color);
+ gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
+ };
+
+ return DebugGeometry;
+})();
diff --git a/tests/html/webvr/js/third-party/wglu/wglu-preserve-state.js b/tests/html/webvr/js/third-party/wglu/wglu-preserve-state.js
new file mode 100644
index 00000000000..16e1b6049ec
--- /dev/null
+++ b/tests/html/webvr/js/third-party/wglu/wglu-preserve-state.js
@@ -0,0 +1,162 @@
+/*
+Copyright (c) 2016, Brandon Jones.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+/*
+Caches specified GL state, runs a callback, and restores the cached state when
+done.
+
+Example usage:
+
+var savedState = [
+ gl.ARRAY_BUFFER_BINDING,
+
+  // TEXTURE_BINDING_2D or _CUBE_MAP must always be followed by the texture unit.
+ gl.TEXTURE_BINDING_2D, gl.TEXTURE0,
+
+ gl.CLEAR_COLOR,
+];
+// After this call the array buffer, texture unit 0, active texture, and clear
+// color will be restored. The viewport will remain changed, however, because
+// gl.VIEWPORT was not included in the savedState list.
+WGLUPreserveGLState(gl, savedState, function(gl) {
+ gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
+ gl.bufferData(gl.ARRAY_BUFFER, ....);
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ gl.texImage2D(gl.TEXTURE_2D, ...);
+
+ gl.clearColor(1, 0, 0, 1);
+ gl.clear(gl.COLOR_BUFFER_BIT);
+});
+
+Note that this is not intended to be fast. Managing state in your own code to
+avoid redundant state setting and querying will always be faster. This function
+is most useful for cases where you may not have full control over the WebGL
+calls being made, such as tooling or effect injectors.
+*/
+
+function WGLUPreserveGLState(gl, bindings, callback) {
+ if (!bindings) {
+ callback(gl);
+ return;
+ }
+
+ var boundValues = [];
+
+ var activeTexture = null;
+ for (var i = 0; i < bindings.length; ++i) {
+ var binding = bindings[i];
+ switch (binding) {
+ case gl.TEXTURE_BINDING_2D:
+ case gl.TEXTURE_BINDING_CUBE_MAP:
+ var textureUnit = bindings[++i];
+ if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31) {
+ console.error("TEXTURE_BINDING_2D or TEXTURE_BINDING_CUBE_MAP must be followed by a valid texture unit");
+ boundValues.push(null, null);
+ break;
+ }
+ if (!activeTexture) {
+ activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
+ }
+ gl.activeTexture(textureUnit);
+ boundValues.push(gl.getParameter(binding), null);
+ break;
+ case gl.ACTIVE_TEXTURE:
+ activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
+ boundValues.push(null);
+ break;
+ default:
+ boundValues.push(gl.getParameter(binding));
+ break;
+ }
+ }
+
+ callback(gl);
+
+ for (var i = 0; i < bindings.length; ++i) {
+ var binding = bindings[i];
+ var boundValue = boundValues[i];
+ switch (binding) {
+ case gl.ACTIVE_TEXTURE:
+ break; // Ignore this binding, since we special-case it to happen last.
+ case gl.ARRAY_BUFFER_BINDING:
+ gl.bindBuffer(gl.ARRAY_BUFFER, boundValue);
+ break;
+ case gl.COLOR_CLEAR_VALUE:
+ gl.clearColor(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
+ break;
+ case gl.COLOR_WRITEMASK:
+ gl.colorMask(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
+ break;
+ case gl.CURRENT_PROGRAM:
+ gl.useProgram(boundValue);
+ break;
+ case gl.ELEMENT_ARRAY_BUFFER_BINDING:
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, boundValue);
+ break;
+ case gl.FRAMEBUFFER_BINDING:
+ gl.bindFramebuffer(gl.FRAMEBUFFER, boundValue);
+ break;
+ case gl.RENDERBUFFER_BINDING:
+ gl.bindRenderbuffer(gl.RENDERBUFFER, boundValue);
+ break;
+ case gl.TEXTURE_BINDING_2D:
+ var textureUnit = bindings[++i];
+ if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
+ break;
+ gl.activeTexture(textureUnit);
+ gl.bindTexture(gl.TEXTURE_2D, boundValue);
+ break;
+ case gl.TEXTURE_BINDING_CUBE_MAP:
+ var textureUnit = bindings[++i];
+ if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
+ break;
+ gl.activeTexture(textureUnit);
+ gl.bindTexture(gl.TEXTURE_CUBE_MAP, boundValue);
+ break;
+ case gl.VIEWPORT:
+ gl.viewport(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
+ break;
+ case gl.BLEND:
+ case gl.CULL_FACE:
+ case gl.DEPTH_TEST:
+ case gl.SCISSOR_TEST:
+ case gl.STENCIL_TEST:
+ if (boundValue) {
+ gl.enable(binding);
+ } else {
+ gl.disable(binding);
+ }
+ break;
+ default:
+ console.log("No GL restore behavior for 0x" + binding.toString(16));
+ break;
+ }
+
+ if (activeTexture) {
+ gl.activeTexture(activeTexture);
+ }
+ }
+}
diff --git a/tests/html/webvr/js/third-party/wglu/wglu-program.js b/tests/html/webvr/js/third-party/wglu/wglu-program.js
new file mode 100644
index 00000000000..911182edb8f
--- /dev/null
+++ b/tests/html/webvr/js/third-party/wglu/wglu-program.js
@@ -0,0 +1,179 @@
+/*
+Copyright (c) 2015, Brandon Jones.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+/*
+Utility class to make loading shader programs easier. Does all the error
+checking you typically want, automatically queries uniform and attribute
+locations, and attempts to take advantage of some browsers' ability to link
+asynchronously by not querying any information from the program until its
+first use.
+*/
+var WGLUProgram = (function() {
+
+ "use strict";
+
+ // Attempts to allow the browser to asynchronously compile and link
+ var Program = function(gl) {
+ this.gl = gl;
+ this.program = gl.createProgram();
+ this.attrib = null;
+ this.uniform = null;
+
+ this._firstUse = true;
+ this._vertexShader = null;
+ this._fragmentShader = null;
+ }
+
+ Program.prototype.attachShaderSource = function(source, type) {
+ var gl = this.gl;
+ var shader;
+
+ switch (type) {
+ case gl.VERTEX_SHADER:
+ this._vertexShader = gl.createShader(type);
+ shader = this._vertexShader;
+ break;
+ case gl.FRAGMENT_SHADER:
+ this._fragmentShader = gl.createShader(type);
+ shader = this._fragmentShader;
+ break;
+ default:
+        console.error("Invalid Shader Type:", type);
+ return;
+ }
+
+ gl.attachShader(this.program, shader);
+ gl.shaderSource(shader, source);
+ gl.compileShader(shader);
+ }
+
+ Program.prototype.attachShaderSourceFromXHR = function(url, type) {
+ var self = this;
+ return new Promise(function(resolve, reject) {
+ var xhr = new XMLHttpRequest();
+ xhr.addEventListener("load", function (ev) {
+ if (xhr.status == 200) {
+ self.attachShaderSource(xhr.response, type);
+ resolve();
+ } else {
+ reject(xhr.statusText);
+ }
+ }, false);
+ xhr.open("GET", url, true);
+ xhr.send(null);
+ });
+ }
+
+ Program.prototype.attachShaderSourceFromTag = function(tagId, type) {
+ var shaderTag = document.getElementById(tagId);
+ if (!shaderTag) {
+ console.error("Shader source tag not found:", tagId);
+ return;
+ }
+
+ if (!type) {
+ if (shaderTag.type == "x-shader/x-vertex") {
+ type = this.gl.VERTEX_SHADER;
+ } else if (shaderTag.type == "x-shader/x-fragment") {
+ type = this.gl.FRAGMENT_SHADER;
+ } else {
+ console.error("Invalid Shader Type:", shaderTag.type);
+ return;
+ }
+ }
+
+ var src = "";
+ var k = shaderTag.firstChild;
+ while (k) {
+ if (k.nodeType == 3) {
+ src += k.textContent;
+ }
+ k = k.nextSibling;
+ }
+ this.attachShaderSource(src, type);
+ }
+
+ Program.prototype.bindAttribLocation = function(attribLocationMap) {
+ var gl = this.gl;
+
+ if (attribLocationMap) {
+ this.attrib = {};
+ for (var attribName in attribLocationMap) {
+ gl.bindAttribLocation(this.program, attribLocationMap[attribName], attribName);
+ this.attrib[attribName] = attribLocationMap[attribName];
+ }
+ }
+ }
+
+ Program.prototype.transformFeedbackVaryings = function(varyings, type) {
+    this.gl.transformFeedbackVaryings(this.program, varyings, type);
+ }
+
+ Program.prototype.link = function() {
+ this.gl.linkProgram(this.program);
+ }
+
+ Program.prototype.use = function() {
+ var gl = this.gl;
+
+ // If this is the first time the program has been used do all the error checking and
+ // attrib/uniform querying needed.
+ if (this._firstUse) {
+ if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) {
+ if (this._vertexShader && !gl.getShaderParameter(this._vertexShader, gl.COMPILE_STATUS)) {
+ console.error("Vertex shader compile error:", gl.getShaderInfoLog(this._vertexShader));
+ } else if (this._fragmentShader && !gl.getShaderParameter(this._fragmentShader, gl.COMPILE_STATUS)) {
+ console.error("Fragment shader compile error:", gl.getShaderInfoLog(this._fragmentShader));
+ } else {
+ console.error("Program link error:", gl.getProgramInfoLog(this.program));
+ }
+ gl.deleteProgram(this.program);
+ this.program = null;
+ } else {
+ if (!this.attrib) {
+ this.attrib = {};
+ var attribCount = gl.getProgramParameter(this.program, gl.ACTIVE_ATTRIBUTES);
+ for (var i = 0; i < attribCount; i++) {
+ var attribInfo = gl.getActiveAttrib(this.program, i);
+ this.attrib[attribInfo.name] = gl.getAttribLocation(this.program, attribInfo.name);
+ }
+ }
+
+ this.uniform = {};
+ var uniformCount = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
+ var uniformName = "";
+ for (var i = 0; i < uniformCount; i++) {
+ var uniformInfo = gl.getActiveUniform(this.program, i);
+ uniformName = uniformInfo.name.replace("[0]", "");
+ this.uniform[uniformName] = gl.getUniformLocation(this.program, uniformName);
+ }
+ }
+ gl.deleteShader(this._vertexShader);
+ gl.deleteShader(this._fragmentShader);
+ this._firstUse = false;
+ }
+
+ gl.useProgram(this.program);
+ }
+
+ return Program;
+})();
diff --git a/tests/html/webvr/js/third-party/wglu/wglu-stats.js b/tests/html/webvr/js/third-party/wglu/wglu-stats.js
new file mode 100644
index 00000000000..70aee00671b
--- /dev/null
+++ b/tests/html/webvr/js/third-party/wglu/wglu-stats.js
@@ -0,0 +1,649 @@
+/*
+Copyright (c) 2016, Brandon Jones.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+/*
+Heavily inspired by Mr. Doob's stats.js, this FPS counter is rendered completely
+with WebGL, allowing it to be shown in cases where overlaid HTML elements aren't
+usable (like WebVR), or if you want the FPS counter to be rendered as part of
+your scene.
+
+See stats-test.html for basic usage.
+*/
+var WGLUStats = (function() {
+
+ "use strict";
+
+ //--------------------
+ // glMatrix functions
+ //--------------------
+
+ // These functions have been copied here from glMatrix (glmatrix.net) to allow
+ // this file to run standalone.
+
+ var mat4_identity = function(out) {
+ out[0] = 1;
+ out[1] = 0;
+ out[2] = 0;
+ out[3] = 0;
+ out[4] = 0;
+ out[5] = 1;
+ out[6] = 0;
+ out[7] = 0;
+ out[8] = 0;
+ out[9] = 0;
+ out[10] = 1;
+ out[11] = 0;
+ out[12] = 0;
+ out[13] = 0;
+ out[14] = 0;
+ out[15] = 1;
+ return out;
+ };
+
+ var mat4_multiply = function (out, a, b) {
+ var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
+ a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
+ a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
+ a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];
+
+ // Cache only the current line of the second matrix
+ var b0 = b[0], b1 = b[1], b2 = b[2], b3 = b[3];
+ out[0] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
+ out[1] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
+ out[2] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
+ out[3] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
+
+ b0 = b[4]; b1 = b[5]; b2 = b[6]; b3 = b[7];
+ out[4] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
+ out[5] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
+ out[6] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
+ out[7] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
+
+ b0 = b[8]; b1 = b[9]; b2 = b[10]; b3 = b[11];
+ out[8] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
+ out[9] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
+ out[10] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
+ out[11] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
+
+ b0 = b[12]; b1 = b[13]; b2 = b[14]; b3 = b[15];
+ out[12] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
+ out[13] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
+ out[14] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
+ out[15] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
+ return out;
+ };
+
+ var mat4_fromTranslation = function(out, v) {
+ out[0] = 1;
+ out[1] = 0;
+ out[2] = 0;
+ out[3] = 0;
+ out[4] = 0;
+ out[5] = 1;
+ out[6] = 0;
+ out[7] = 0;
+ out[8] = 0;
+ out[9] = 0;
+ out[10] = 1;
+ out[11] = 0;
+ out[12] = v[0];
+ out[13] = v[1];
+ out[14] = v[2];
+ out[15] = 1;
+ return out;
+ };
+
+ var mat4_ortho = function (out, left, right, bottom, top, near, far) {
+ var lr = 1 / (left - right),
+ bt = 1 / (bottom - top),
+ nf = 1 / (near - far);
+ out[0] = -2 * lr;
+ out[1] = 0;
+ out[2] = 0;
+ out[3] = 0;
+ out[4] = 0;
+ out[5] = -2 * bt;
+ out[6] = 0;
+ out[7] = 0;
+ out[8] = 0;
+ out[9] = 0;
+ out[10] = 2 * nf;
+ out[11] = 0;
+ out[12] = (left + right) * lr;
+ out[13] = (top + bottom) * bt;
+ out[14] = (far + near) * nf;
+ out[15] = 1;
+ return out;
+ };
+
+ var mat4_translate = function (out, a, v) {
+ var x = v[0], y = v[1], z = v[2],
+ a00, a01, a02, a03,
+ a10, a11, a12, a13,
+ a20, a21, a22, a23;
+
+ if (a === out) {
+ out[12] = a[0] * x + a[4] * y + a[8] * z + a[12];
+ out[13] = a[1] * x + a[5] * y + a[9] * z + a[13];
+ out[14] = a[2] * x + a[6] * y + a[10] * z + a[14];
+ out[15] = a[3] * x + a[7] * y + a[11] * z + a[15];
+ } else {
+ a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
+ a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
+ a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
+
+ out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03;
+ out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13;
+ out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23;
+
+ out[12] = a00 * x + a10 * y + a20 * z + a[12];
+ out[13] = a01 * x + a11 * y + a21 * z + a[13];
+ out[14] = a02 * x + a12 * y + a22 * z + a[14];
+ out[15] = a03 * x + a13 * y + a23 * z + a[15];
+ }
+
+ return out;
+ };
+
+ var mat4_scale = function(out, a, v) {
+ var x = v[0], y = v[1], z = v[2];
+
+ out[0] = a[0] * x;
+ out[1] = a[1] * x;
+ out[2] = a[2] * x;
+ out[3] = a[3] * x;
+ out[4] = a[4] * y;
+ out[5] = a[5] * y;
+ out[6] = a[6] * y;
+ out[7] = a[7] * y;
+ out[8] = a[8] * z;
+ out[9] = a[9] * z;
+ out[10] = a[10] * z;
+ out[11] = a[11] * z;
+ out[12] = a[12];
+ out[13] = a[13];
+ out[14] = a[14];
+ out[15] = a[15];
+ return out;
+ };
+
+ //-------------------
+ // Utility functions
+ //-------------------
+
+ function linkProgram(gl, vertexSource, fragmentSource, attribLocationMap) {
+ // No error checking for brevity.
+ var vertexShader = gl.createShader(gl.VERTEX_SHADER);
+ gl.shaderSource(vertexShader, vertexSource);
+ gl.compileShader(vertexShader);
+
+ var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
+ gl.shaderSource(fragmentShader, fragmentSource);
+ gl.compileShader(fragmentShader);
+
+ var program = gl.createProgram();
+ gl.attachShader(program, vertexShader);
+ gl.attachShader(program, fragmentShader);
+
+ for (var attribName in attribLocationMap)
+ gl.bindAttribLocation(program, attribLocationMap[attribName], attribName);
+
+ gl.linkProgram(program);
+
+ gl.deleteShader(vertexShader);
+ gl.deleteShader(fragmentShader);
+
+ return program;
+ }
+
+ function getProgramUniforms(gl, program) {
+ var uniforms = {};
+ var uniformCount = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS);
+ var uniformName = "";
+ for (var i = 0; i < uniformCount; i++) {
+ var uniformInfo = gl.getActiveUniform(program, i);
+ uniformName = uniformInfo.name.replace("[0]", "");
+ uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
+ }
+ return uniforms;
+ }
+
+ //----------------------------
+ // Seven-segment text display
+ //----------------------------
+
+ var sevenSegmentVS = [
+ "uniform mat4 projectionMat;",
+ "uniform mat4 modelViewMat;",
+ "attribute vec2 position;",
+
+ "void main() {",
+ " gl_Position = projectionMat * modelViewMat * vec4( position, 0.0, 1.0 );",
+ "}",
+ ].join("\n");
+
+ var sevenSegmentFS = [
+ "precision mediump float;",
+ "uniform vec4 color;",
+
+ "void main() {",
+ " gl_FragColor = color;",
+ "}",
+ ].join("\n");
+
+ var SevenSegmentText = function (gl) {
+ this.gl = gl;
+
+ this.attribs = {
+ position: 0,
+ color: 1
+ };
+
+ this.program = linkProgram(gl, sevenSegmentVS, sevenSegmentFS, this.attribs);
+ this.uniforms = getProgramUniforms(gl, this.program);
+
+ var verts = [];
+ var segmentIndices = {};
+ var indices = [];
+
+ var width = 0.5;
+ var thickness = 0.25;
+ this.kerning = 2.0;
+
+ this.matrix = new Float32Array(16);
+
+ function defineSegment(id, left, top, right, bottom) {
+ var idx = verts.length / 2;
+ verts.push(
+ left, top,
+ right, top,
+ right, bottom,
+ left, bottom);
+
+ segmentIndices[id] = [
+ idx, idx+2, idx+1,
+ idx, idx+3, idx+2];
+ }
+
+ var characters = {};
+ this.characters = characters;
+
+ function defineCharacter(c, segments) {
+ var character = {
+ character: c,
+ offset: indices.length * 2,
+ count: 0
+ };
+
+ for (var i = 0; i < segments.length; ++i) {
+ var idx = segments[i];
+ var segment = segmentIndices[idx];
+ character.count += segment.length;
+ indices.push.apply(indices, segment);
+ }
+
+ characters[c] = character;
+ }
+
+ /* Segment layout is as follows:
+
+ |-0-|
+ 3 4
+ |-1-|
+ 5 6
+ |-2-|
+
+ */
+
+ defineSegment(0, -1, 1, width, 1-thickness);
+ defineSegment(1, -1, thickness*0.5, width, -thickness*0.5);
+ defineSegment(2, -1, -1+thickness, width, -1);
+ defineSegment(3, -1, 1, -1+thickness, -thickness*0.5);
+ defineSegment(4, width-thickness, 1, width, -thickness*0.5);
+ defineSegment(5, -1, thickness*0.5, -1+thickness, -1);
+ defineSegment(6, width-thickness, thickness*0.5, width, -1);
+
+
+ defineCharacter("0", [0, 2, 3, 4, 5, 6]);
+ defineCharacter("1", [4, 6]);
+ defineCharacter("2", [0, 1, 2, 4, 5]);
+ defineCharacter("3", [0, 1, 2, 4, 6]);
+ defineCharacter("4", [1, 3, 4, 6]);
+ defineCharacter("5", [0, 1, 2, 3, 6]);
+ defineCharacter("6", [0, 1, 2, 3, 5, 6]);
+ defineCharacter("7", [0, 4, 6]);
+ defineCharacter("8", [0, 1, 2, 3, 4, 5, 6]);
+ defineCharacter("9", [0, 1, 2, 3, 4, 6]);
+ defineCharacter("A", [0, 1, 3, 4, 5, 6]);
+ defineCharacter("B", [1, 2, 3, 5, 6]);
+ defineCharacter("C", [0, 2, 3, 5]);
+ defineCharacter("D", [1, 2, 4, 5, 6]);
+ defineCharacter("E", [0, 1, 2, 4, 6]);
+ defineCharacter("F", [0, 1, 3, 5]);
+ defineCharacter("P", [0, 1, 3, 4, 5]);
+ defineCharacter("-", [1]);
+ defineCharacter(" ", []);
+ defineCharacter("_", [2]); // Used for undefined characters
+
+ this.vertBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.DYNAMIC_DRAW);
+
+ this.indexBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
+ };
+
+ SevenSegmentText.prototype.render = function(projectionMat, modelViewMat, text, r, g, b, a) {
+ var gl = this.gl;
+
+ if (r == undefined || g == undefined || b == undefined) {
+ r = 0.0;
+ g = 1.0;
+ b = 0.0;
+ }
+
+ if (a == undefined)
+ a = 1.0;
+
+ gl.useProgram(this.program);
+
+ gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
+ gl.uniform4f(this.uniforms.color, r, g, b, a);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+
+ gl.enableVertexAttribArray(this.attribs.position);
+ gl.vertexAttribPointer(this.attribs.position, 2, gl.FLOAT, false, 8, 0);
+
+ text = text.toUpperCase();
+
+ var offset = 0;
+
+ for (var i = 0; i < text.length; ++i) {
+ var c;
+ if (text[i] in this.characters) {
+ c = this.characters[text[i]];
+ } else {
+ c = this.characters["_"];
+ }
+
+ if (c.count != 0) {
+ mat4_fromTranslation(this.matrix, [offset, 0, 0]);
+ mat4_multiply(this.matrix, modelViewMat, this.matrix);
+
+ gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, this.matrix);
+ gl.drawElements(gl.TRIANGLES, c.count, gl.UNSIGNED_SHORT, c.offset);
+
+ }
+
+ offset += this.kerning;
+ }
+ }
+
+ //-----------
+ // FPS Graph
+ //-----------
+
+ var statsVS = [
+ "uniform mat4 projectionMat;",
+ "uniform mat4 modelViewMat;",
+ "attribute vec3 position;",
+ "attribute vec3 color;",
+ "varying vec4 vColor;",
+
+ "void main() {",
+ " vColor = vec4(color, 1.0);",
+ " gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
+ "}",
+ ].join("\n");
+
+ var statsFS = [
+ "precision mediump float;",
+ "varying vec4 vColor;",
+
+ "void main() {",
+ " gl_FragColor = vColor;",
+ "}",
+ ].join("\n");
+
+ var segments = 30;
+ var maxFPS = 90;
+
+ function segmentToX(i) {
+ return ((0.9/segments) * i) - 0.45;
+ }
+
+ function fpsToY(value) {
+ return (Math.min(value, maxFPS) * (0.7 / maxFPS)) - 0.45;
+ }
+
+ function fpsToRGB(value) {
+ return {
+ r: Math.max(0.0, Math.min(1.0, 1.0 - (value/60))),
+ g: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15)))),
+ b: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15))))
+ };
+ }
+
+ var now = /*( performance && performance.now ) ? performance.now.bind( performance ) :*/ Date.now;
+
+ var Stats = function(gl) {
+ this.gl = gl;
+
+ this.sevenSegmentText = new SevenSegmentText(gl);
+
+ this.startTime = now();
+ this.prevTime = this.startTime;
+ this.frames = 0;
+ this.fps = 0;
+
+ this.orthoProjMatrix = new Float32Array(16);
+ this.orthoViewMatrix = new Float32Array(16);
+ this.modelViewMatrix = new Float32Array(16);
+
+ // Hard coded because it doesn't change:
+ // Scale by 0.075 in X and Y
+ // Translate into upper left corner w/ z = 0.02
+ this.textMatrix = new Float32Array([
+ 0.075, 0, 0, 0,
+ 0, 0.075, 0, 0,
+ 0, 0, 1, 0,
+ -0.3625, 0.3625, 0.02, 1
+ ]);
+
+ this.lastSegment = 0;
+
+ this.attribs = {
+ position: 0,
+ color: 1
+ };
+
+ this.program = linkProgram(gl, statsVS, statsFS, this.attribs);
+ this.uniforms = getProgramUniforms(gl, this.program);
+
+ var fpsVerts = [];
+ var fpsIndices = [];
+
+ // Graph geometry
+ for (var i = 0; i < segments; ++i) {
+ // Bar top
+ fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
+ fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
+
+ // Bar bottom
+ fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
+ fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
+
+ var idx = i * 4;
+ fpsIndices.push(idx, idx+3, idx+1,
+ idx+3, idx, idx+2);
+ }
+
+ function addBGSquare(left, bottom, right, top, z, r, g, b) {
+ var idx = fpsVerts.length / 6;
+
+ fpsVerts.push(left, bottom, z, r, g, b);
+ fpsVerts.push(right, top, z, r, g, b);
+ fpsVerts.push(left, top, z, r, g, b);
+ fpsVerts.push(right, bottom, z, r, g, b);
+
+ fpsIndices.push(idx, idx+1, idx+2,
+ idx, idx+3, idx+1);
+ };
+
+ // Panel Background
+ addBGSquare(-0.5, -0.5, 0.5, 0.5, 0.0, 0.0, 0.0, 0.125);
+
+ // FPS Background
+ addBGSquare(-0.45, -0.45, 0.45, 0.25, 0.01, 0.0, 0.0, 0.4);
+
+ // 30 FPS line
+ addBGSquare(-0.45, fpsToY(30), 0.45, fpsToY(32), 0.015, 0.5, 0.0, 0.5);
+
+ // 60 FPS line
+ addBGSquare(-0.45, fpsToY(60), 0.45, fpsToY(62), 0.015, 0.2, 0.0, 0.75);
+
+ this.fpsVertBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fpsVerts), gl.DYNAMIC_DRAW);
+
+ this.fpsIndexBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(fpsIndices), gl.STATIC_DRAW);
+
+ this.fpsIndexCount = fpsIndices.length;
+ };
+
+ Stats.prototype.begin = function() {
+ this.startTime = now();
+ };
+
+ Stats.prototype.end = function() {
+ var time = now();
+
+ this.frames++;
+
+ if (time > this.prevTime + 250) {
+ this.fps = Math.round((this.frames * 1000) / (time - this.prevTime));
+
+ this.updateGraph(this.fps);
+
+ this.prevTime = time;
+ this.frames = 0;
+ }
+ };
+
+ Stats.prototype.updateGraph = function(value) {
+ var gl = this.gl;
+
+ var color = fpsToRGB(value);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
+
+ // Update the current segment with the new FPS value
+ var updateVerts = [
+ segmentToX(this.lastSegment), fpsToY(value), 0.02, color.r, color.g, color.b,
+ segmentToX(this.lastSegment+1), fpsToY(value), 0.02, color.r, color.g, color.b,
+ segmentToX(this.lastSegment), fpsToY(0), 0.02, color.r, color.g, color.b,
+ segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
+ ];
+
+ // Re-shape the next segment into the green "progress" line
+ color.r = 0.2;
+ color.g = 1.0;
+ color.b = 0.2;
+
+ if (this.lastSegment == segments - 1) {
+ // If we're updating the last segment we need to do two bufferSubDatas
+ // to update the segment and turn the first segment into the progress line.
+ gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
+ updateVerts = [
+ segmentToX(0), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
+ segmentToX(.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
+ segmentToX(0), fpsToY(0), 0.02, color.r, color.g, color.b,
+ segmentToX(.25), fpsToY(0), 0.02, color.r, color.g, color.b
+ ];
+ gl.bufferSubData(gl.ARRAY_BUFFER, 0, new Float32Array(updateVerts));
+ } else {
+ updateVerts.push(
+ segmentToX(this.lastSegment+1), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
+ segmentToX(this.lastSegment+1.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
+ segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
+ segmentToX(this.lastSegment+1.25), fpsToY(0), 0.02, color.r, color.g, color.b
+ );
+ gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
+ }
+
+ this.lastSegment = (this.lastSegment+1) % segments;
+ };
+
+ Stats.prototype.render = function(projectionMat, modelViewMat) {
+ var gl = this.gl;
+
+ // Render text first, minor win for early fragment discard
+ mat4_multiply(this.modelViewMatrix, modelViewMat, this.textMatrix);
+ this.sevenSegmentText.render(projectionMat, this.modelViewMatrix, this.fps + " FP5");
+
+ gl.useProgram(this.program);
+
+ gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
+ gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, modelViewMat);
+
+ gl.enableVertexAttribArray(this.attribs.position);
+ gl.enableVertexAttribArray(this.attribs.color);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
+
+ gl.vertexAttribPointer(this.attribs.position, 3, gl.FLOAT, false, 24, 0);
+ gl.vertexAttribPointer(this.attribs.color, 3, gl.FLOAT, false, 24, 12);
+
+ // Draw the graph and background in a single call
+ gl.drawElements(gl.TRIANGLES, this.fpsIndexCount, gl.UNSIGNED_SHORT, 0);
+ }
+
+ Stats.prototype.renderOrtho = function(x, y, width, height) {
+ var canvas = this.gl.canvas;
+
+ if (x == undefined || y == undefined) {
+ x = 10 * window.devicePixelRatio;
+ y = 10 * window.devicePixelRatio;
+ }
+ if (width == undefined || height == undefined) {
+ width = 75 * window.devicePixelRatio;
+ height = 75 * window.devicePixelRatio;
+ }
+
+ mat4_ortho(this.orthoProjMatrix, 0, canvas.width, 0, canvas.height, 0.1, 1024);
+
+ mat4_identity(this.orthoViewMatrix);
+ mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [x, canvas.height - height - y, -1]);
+ mat4_scale(this.orthoViewMatrix, this.orthoViewMatrix, [width, height, 1]);
+ mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [0.5, 0.5, 0]);
+
+ this.render(this.orthoProjMatrix, this.orthoViewMatrix);
+ }
+
+ return Stats;
+})();
diff --git a/tests/html/webvr/js/third-party/wglu/wglu-texture.js b/tests/html/webvr/js/third-party/wglu/wglu-texture.js
new file mode 100644
index 00000000000..6bfa368ab92
--- /dev/null
+++ b/tests/html/webvr/js/third-party/wglu/wglu-texture.js
@@ -0,0 +1,687 @@
+/*
+Copyright (c) 2015, Brandon Jones.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+/*
+Handles loading of textures of multiple formats, tries to be efficient about it.
+
+Formats supported will vary by devices. Use the .supports<format>() functions
+to determine if a format is supported. Most of the time you can just call
+loader.loadTexture("url"); and it will handle it based on the extension.
+If the extension can't be relied on, use the corresponding
+.load<Extension>("url") calls.
+*/
+var WGLUTextureLoader = (function() {
+
+ "use strict";
+
+ //============================//
+ // DXT constants and utilites //
+ //============================//
+
+ // Utility functions
+ // Builds a numeric code for a given fourCC string
+ function fourCCToInt32(value) {
+ return value.charCodeAt(0) +
+ (value.charCodeAt(1) << 8) +
+ (value.charCodeAt(2) << 16) +
+ (value.charCodeAt(3) << 24);
+ }
+
+ // Turns a fourCC numeric code into a string
+ function int32ToFourCC(value) {
+ return String.fromCharCode(
+ value & 0xff,
+ (value >> 8) & 0xff,
+ (value >> 16) & 0xff,
+ (value >> 24) & 0xff
+ );
+ }
+
+  // Calculates the size of a compressed texture level in bytes
+ function textureLevelSize(format, width, height) {
+ switch (format) {
+ case COMPRESSED_RGB_S3TC_DXT1_EXT:
+ case COMPRESSED_RGB_ATC_WEBGL:
+ case COMPRESSED_RGB_ETC1_WEBGL:
+ return ((width + 3) >> 2) * ((height + 3) >> 2) * 8;
+
+ case COMPRESSED_RGBA_S3TC_DXT3_EXT:
+ case COMPRESSED_RGBA_S3TC_DXT5_EXT:
+ case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
+ case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
+ return ((width + 3) >> 2) * ((height + 3) >> 2) * 16;
+
+ case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
+ case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
+ return Math.floor((Math.max(width, 8) * Math.max(height, 8) * 4 + 7) / 8);
+
+ case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
+ case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
+ return Math.floor((Math.max(width, 16) * Math.max(height, 8) * 2 + 7) / 8);
+
+ default:
+ return 0;
+ }
+ }
+
+ // DXT formats, from:
+ // http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_s3tc/
+ var COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0;
+ var COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1;
+ var COMPRESSED_RGBA_S3TC_DXT3_EXT = 0x83F2;
+ var COMPRESSED_RGBA_S3TC_DXT5_EXT = 0x83F3;
+
+ // ATC formats, from:
+ // http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_atc/
+ var COMPRESSED_RGB_ATC_WEBGL = 0x8C92;
+ var COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL = 0x8C93;
+ var COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL = 0x87EE;
+
+ // DXT values and structures referenced from:
+ // http://msdn.microsoft.com/en-us/library/bb943991.aspx/
+ var DDS_MAGIC = 0x20534444;
+ var DDSD_MIPMAPCOUNT = 0x20000;
+ var DDPF_FOURCC = 0x4;
+
+ var DDS_HEADER_LENGTH = 31; // The header length in 32 bit ints.
+
+ // Offsets into the header array.
+ var DDS_HEADER_MAGIC = 0;
+
+ var DDS_HEADER_SIZE = 1;
+ var DDS_HEADER_FLAGS = 2;
+ var DDS_HEADER_HEIGHT = 3;
+ var DDS_HEADER_WIDTH = 4;
+
+ var DDS_HEADER_MIPMAPCOUNT = 7;
+
+ var DDS_HEADER_PF_FLAGS = 20;
+ var DDS_HEADER_PF_FOURCC = 21;
+
+ // FourCC format identifiers.
+ var FOURCC_DXT1 = fourCCToInt32("DXT1");
+ var FOURCC_DXT3 = fourCCToInt32("DXT3");
+ var FOURCC_DXT5 = fourCCToInt32("DXT5");
+
+ var FOURCC_ATC = fourCCToInt32("ATC ");
+ var FOURCC_ATCA = fourCCToInt32("ATCA");
+ var FOURCC_ATCI = fourCCToInt32("ATCI");
+
+ //==================//
+ // Crunch constants //
+ //==================//
+
+ // Taken from crnlib.h
+ var CRN_FORMAT = {
+ cCRNFmtInvalid: -1,
+
+ cCRNFmtDXT1: 0,
+ // cCRNFmtDXT3 is not currently supported when writing to CRN - only DDS.
+ cCRNFmtDXT3: 1,
+ cCRNFmtDXT5: 2
+
+ // Crunch supports more formats than this, but we can't use them here.
+ };
+
+ // Mapping of Crunch formats to DXT formats.
+ var DXT_FORMAT_MAP = {};
+ DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT1] = COMPRESSED_RGB_S3TC_DXT1_EXT;
+ DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT3] = COMPRESSED_RGBA_S3TC_DXT3_EXT;
+ DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT5] = COMPRESSED_RGBA_S3TC_DXT5_EXT;
+
+ //===============//
+ // PVR constants //
+ //===============//
+
+ // PVR formats, from:
+ // http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_pvrtc/
+ var COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 0x8C00;
+ var COMPRESSED_RGB_PVRTC_2BPPV1_IMG = 0x8C01;
+ var COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 0x8C02;
+ var COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = 0x8C03;
+
+ // ETC1 format, from:
+ // http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_etc1/
+ var COMPRESSED_RGB_ETC1_WEBGL = 0x8D64;
+
+ var PVR_FORMAT_2BPP_RGB = 0;
+ var PVR_FORMAT_2BPP_RGBA = 1;
+ var PVR_FORMAT_4BPP_RGB = 2;
+ var PVR_FORMAT_4BPP_RGBA = 3;
+ var PVR_FORMAT_ETC1 = 6;
+ var PVR_FORMAT_DXT1 = 7;
+ var PVR_FORMAT_DXT3 = 9;
+  var PVR_FORMAT_DXT5 = 11;
+
+ var PVR_HEADER_LENGTH = 13; // The header length in 32 bit ints.
+ var PVR_MAGIC = 0x03525650; //0x50565203;
+
+ // Offsets into the header array.
+ var PVR_HEADER_MAGIC = 0;
+ var PVR_HEADER_FORMAT = 2;
+ var PVR_HEADER_HEIGHT = 6;
+ var PVR_HEADER_WIDTH = 7;
+ var PVR_HEADER_MIPMAPCOUNT = 11;
+ var PVR_HEADER_METADATA = 12;
+
+ //============//
+ // Misc Utils //
+ //============//
+
+ // When an error occurs set the texture to a 1x1 black pixel
+ // This prevents WebGL errors from attempting to use unrenderable textures
+ // and clears out stale data if we're re-using a texture.
+ function clearOnError(gl, error, texture, callback) {
+ if (console) {
+ console.error(error);
+ }
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0]));
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
+
+ // Notify the user that an error occurred and the texture is ready.
+ if (callback) { callback(texture, error, null); }
+ }
+
+ function isPowerOfTwo(n) {
+ return (n & (n - 1)) === 0;
+ }
+
+ function getExtension(gl, name) {
+ var vendorPrefixes = ["", "WEBKIT_", "MOZ_"];
+ var ext = null;
+ for (var i in vendorPrefixes) {
+ ext = gl.getExtension(vendorPrefixes[i] + name);
+ if (ext) { break; }
+ }
+ return ext;
+ }
+
+ //==================//
+ // DDS File Reading //
+ //==================//
+
+ // Parse a DDS file and provide information about the raw DXT data it contains to the given callback.
+ function parseDDS(arrayBuffer, callback, errorCallback) {
+ // Callbacks must be provided.
+ if (!callback || !errorCallback) { return; }
+
+ // Get a view of the arrayBuffer that represents the DDS header.
+ var header = new Int32Array(arrayBuffer, 0, DDS_HEADER_LENGTH);
+
+ // Do some sanity checks to make sure this is a valid DDS file.
+ if(header[DDS_HEADER_MAGIC] != DDS_MAGIC) {
+ errorCallback("Invalid magic number in DDS header");
+ return 0;
+ }
+
+    if(!(header[DDS_HEADER_PF_FLAGS] & DDPF_FOURCC)) {
+ errorCallback("Unsupported format, must contain a FourCC code");
+ return 0;
+ }
+
+ // Determine what type of compressed data the file contains.
+ var fourCC = header[DDS_HEADER_PF_FOURCC];
+ var internalFormat;
+ switch(fourCC) {
+ case FOURCC_DXT1:
+ internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
+ break;
+
+ case FOURCC_DXT3:
+ internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
+ break;
+
+ case FOURCC_DXT5:
+ internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
+ break;
+
+ case FOURCC_ATC:
+ internalFormat = COMPRESSED_RGB_ATC_WEBGL;
+ break;
+
+ case FOURCC_ATCA:
+ internalFormat = COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL;
+ break;
+
+ case FOURCC_ATCI:
+ internalFormat = COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL;
+ break;
+
+
+ default:
+ errorCallback("Unsupported FourCC code: " + int32ToFourCC(fourCC));
+ return;
+ }
+
+ // Determine how many mipmap levels the file contains.
+ var levels = 1;
+ if(header[DDS_HEADER_FLAGS] & DDSD_MIPMAPCOUNT) {
+ levels = Math.max(1, header[DDS_HEADER_MIPMAPCOUNT]);
+ }
+
+ // Gather other basic metrics and a view of the raw DXT data.
+ var width = header[DDS_HEADER_WIDTH];
+ var height = header[DDS_HEADER_HEIGHT];
+ var dataOffset = header[DDS_HEADER_SIZE] + 4;
+ var dxtData = new Uint8Array(arrayBuffer, dataOffset);
+
+ // Pass the DXT information to the callback for uploading.
+ callback(dxtData, width, height, levels, internalFormat);
+ }
+
+ //==================//
+ // PVR File Reading //
+ //==================//
+
+ // Parse a PVR file and provide information about the raw texture data it contains to the given callback.
+ function parsePVR(arrayBuffer, callback, errorCallback) {
+ // Callbacks must be provided.
+ if (!callback || !errorCallback) { return; }
+
+ // Get a view of the arrayBuffer that represents the PVR header.
+ var header = new Int32Array(arrayBuffer, 0, PVR_HEADER_LENGTH);
+
+ // Do some sanity checks to make sure this is a valid PVR file.
+ if(header[PVR_HEADER_MAGIC] != PVR_MAGIC) {
+ errorCallback("Invalid magic number in PVR header");
+ return 0;
+ }
+
+ // Determine what type of compressed data the file contains.
+ var format = header[PVR_HEADER_FORMAT];
+ var internalFormat;
+ switch(format) {
+ case PVR_FORMAT_2BPP_RGB:
+ internalFormat = COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
+ break;
+
+ case PVR_FORMAT_2BPP_RGBA:
+ internalFormat = COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
+ break;
+
+ case PVR_FORMAT_4BPP_RGB:
+ internalFormat = COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
+ break;
+
+ case PVR_FORMAT_4BPP_RGBA:
+ internalFormat = COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
+ break;
+
+ case PVR_FORMAT_ETC1:
+ internalFormat = COMPRESSED_RGB_ETC1_WEBGL;
+ break;
+
+ case PVR_FORMAT_DXT1:
+ internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
+ break;
+
+ case PVR_FORMAT_DXT3:
+ internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
+ break;
+
+ case PVR_FORMAT_DXT5:
+ internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
+ break;
+
+ default:
+ errorCallback("Unsupported PVR format: " + format);
+ return;
+ }
+
+ // Gather other basic metrics and a view of the raw PVRTC data.
+ var width = header[PVR_HEADER_WIDTH];
+ var height = header[PVR_HEADER_HEIGHT];
+ var levels = header[PVR_HEADER_MIPMAPCOUNT];
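+ // The texture data begins after the fixed 52-byte PVR v3 header (13 uint32s)
+ // plus the metadata block whose length is stored in the header.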
+ var dataOffset = header[PVR_HEADER_METADATA] + 52;
+ var pvrtcData = new Uint8Array(arrayBuffer, dataOffset);
+
+ // Pass the PVRTC information to the callback for uploading.
+ callback(pvrtcData, width, height, levels, internalFormat);
+ }
+
+ //=============//
+ // IMG loading //
+ //=============//
+
+ /*
+ This function provides a method for loading webgl textures using a pool of
+ image elements, which has very low memory overhead. For more details see:
+ http://blog.tojicode.com/2012/03/javascript-memory-optimization-and.html
+ */
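+ // Example (a minimal sketch; the URL is a placeholder and `gl` is assumed to
+ // be a live WebGL context):
+ //   loadImgTexture(gl, 'textures/example.png', gl.createTexture(),
+ //     function (texture, error, stats) { /* texture ready, or error set */ });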
+ var loadImgTexture = (function createTextureLoader() {
+ var MAX_CACHE_IMAGES = 16;
+
+ var textureImageCache = new Array(MAX_CACHE_IMAGES);
+ var cacheTop = 0;
+ var remainingCacheImages = MAX_CACHE_IMAGES;
+ var pendingTextureRequests = [];
+
+ var TextureImageLoader = function(loadedCallback) {
+ var self = this;
+ var blackPixel = new Uint8Array([0, 0, 0]);
+
+ this.gl = null;
+ this.texture = null;
+ this.callback = null;
+
+ this.image = new Image();
+ this.image.crossOrigin = 'anonymous';
+ this.image.addEventListener('load', function() {
+ var gl = self.gl;
+ gl.bindTexture(gl.TEXTURE_2D, self.texture);
+
+ var startTime = Date.now();
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, self.image);
+
+ if (isPowerOfTwo(self.image.width) && isPowerOfTwo(self.image.height)) {
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
+ gl.generateMipmap(gl.TEXTURE_2D);
+ } else {
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ }
+ var uploadTime = Date.now() - startTime;
+
+ if(self.callback) {
+ var stats = {
+ width: self.image.width,
+ height: self.image.height,
+ internalFormat: gl.RGBA,
+ levelZeroSize: self.image.width * self.image.height * 4,
+ uploadTime: uploadTime
+ };
+ self.callback(self.texture, null, stats);
+ }
+ loadedCallback(self);
+ }, false);
+ this.image.addEventListener('error', function(ev) {
+ clearOnError(self.gl, 'Image could not be loaded: ' + self.image.src, self.texture, self.callback);
+ loadedCallback(self);
+ }, false);
+ };
+
+ TextureImageLoader.prototype.loadTexture = function(gl, src, texture, callback) {
+ this.gl = gl;
+ this.texture = texture;
+ this.callback = callback;
+ this.image.src = src;
+ };
+
+ var PendingTextureRequest = function(gl, src, texture, callback) {
+ this.gl = gl;
+ this.src = src;
+ this.texture = texture;
+ this.callback = callback;
+ };
+
+ function releaseTextureImageLoader(til) {
+ var req;
+ if(pendingTextureRequests.length) {
+ req = pendingTextureRequests.shift();
+ til.loadTexture(req.gl, req.src, req.texture, req.callback);
+ } else {
+ textureImageCache[cacheTop++] = til;
+ }
+ }
+
+ return function(gl, src, texture, callback) {
+ var til;
+
+ if(cacheTop) {
+ til = textureImageCache[--cacheTop];
+ til.loadTexture(gl, src, texture, callback);
+ } else if (remainingCacheImages) {
+ til = new TextureImageLoader(releaseTextureImageLoader);
+ til.loadTexture(gl, src, texture, callback);
+ --remainingCacheImages;
+ } else {
+ pendingTextureRequests.push(new PendingTextureRequest(gl, src, texture, callback));
+ }
+
+ return texture;
+ };
+ })();
+
+ //=====================//
+ // TextureLoader Class //
+ //=====================//
+
+ // This class is our public interface.
+ var TextureLoader = function(gl) {
+ this.gl = gl;
+
+ // Load the compression format extensions, if available
+ this.dxtExt = getExtension(gl, "WEBGL_compressed_texture_s3tc");
+ this.pvrtcExt = getExtension(gl, "WEBGL_compressed_texture_pvrtc");
+ this.atcExt = getExtension(gl, "WEBGL_compressed_texture_atc");
+ this.etc1Ext = getExtension(gl, "WEBGL_compressed_texture_etc1");
+
+ // Returns whether or not the compressed format is supported by the WebGL implementation
+ TextureLoader.prototype._formatSupported = function(format) {
+ switch (format) {
+ case COMPRESSED_RGB_S3TC_DXT1_EXT:
+ case COMPRESSED_RGBA_S3TC_DXT3_EXT:
+ case COMPRESSED_RGBA_S3TC_DXT5_EXT:
+ return !!this.dxtExt;
+
+ case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
+ case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
+ case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
+ case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
+ return !!this.pvrtcExt;
+
+ case COMPRESSED_RGB_ATC_WEBGL:
+ case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
+ case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
+ return !!this.atcExt;
+
+ case COMPRESSED_RGB_ETC1_WEBGL:
+ return !!this.etc1Ext;
+
+ default:
+ return false;
+ }
+ }
+
+ // Uploads compressed texture data to the GPU.
+ TextureLoader.prototype._uploadCompressedData = function(data, width, height, levels, internalFormat, texture, callback) {
+ var gl = this.gl;
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+
+ var offset = 0;
+
+ var stats = {
+ width: width,
+ height: height,
+ internalFormat: internalFormat,
+ levelZeroSize: textureLevelSize(internalFormat, width, height),
+ uploadTime: 0
+ };
+
+ var startTime = Date.now();
+ // Loop through each mip level of compressed texture data provided and upload it to the given texture.
+ for (var i = 0; i < levels; ++i) {
+ // Determine how big this level of compressed texture data is in bytes.
+ var levelSize = textureLevelSize(internalFormat, width, height);
+ // Get a view of the bytes for this level of DXT data.
+ var dxtLevel = new Uint8Array(data.buffer, data.byteOffset + offset, levelSize);
+ // Upload!
+ gl.compressedTexImage2D(gl.TEXTURE_2D, i, internalFormat, width, height, 0, dxtLevel);
+ // The next mip level will be half the height and width of this one.
+ width = width >> 1;
+ height = height >> 1;
+ // Advance the offset into the compressed texture data past the current mip level's data.
+ offset += levelSize;
+ }
+ stats.uploadTime = Date.now() - startTime;
+
+ // We can't use gl.generateMipmap with compressed textures, so only use
+ // mipmapped filtering if the compressed texture data contained mip levels.
+ if (levels > 1) {
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
+ } else {
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ }
+
+ // Notify the user that the texture is ready.
+ if (callback) { callback(texture, null, stats); }
+ }
+
+ TextureLoader.prototype.supportsDXT = function() {
+ return !!this.dxtExt;
+ }
+
+ TextureLoader.prototype.supportsPVRTC = function() {
+ return !!this.pvrtcExt;
+ }
+
+ TextureLoader.prototype.supportsATC = function() {
+ return !!this.atcExt;
+ }
+
+ TextureLoader.prototype.supportsETC1 = function() {
+ return !!this.etc1Ext;
+ }
+
+ // Loads an image file into the given texture.
+ // Supports any format that can be loaded into an img tag.
+ // If no texture is provided one is created and returned.
+ TextureLoader.prototype.loadIMG = function(src, texture, callback) {
+ if(!texture) {
+ texture = this.gl.createTexture();
+ }
+
+ loadImgTexture(gl, src, texture, callback);
+
+ return texture;
+ }
+
+ // Loads a DDS file into the given texture.
+ // If no texture is provided one is created and returned.
+ TextureLoader.prototype.loadDDS = function(src, texture, callback) {
+ var self = this;
+ if (!texture) {
+ texture = this.gl.createTexture();
+ }
+
+ // Load the file via XHR.
+ var xhr = new XMLHttpRequest();
+ xhr.addEventListener('load', function (ev) {
+ if (xhr.status == 200) {
+ // If the file loaded successfully, parse it.
+ parseDDS(xhr.response, function(dxtData, width, height, levels, internalFormat) {
+ if (!self._formatSupported(internalFormat)) {
+ clearOnError(self.gl, "Texture format not supported", texture, callback);
+ return;
+ }
+ // Upload the parsed DXT data to the texture.
+ self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
+ }, function(error) {
+ clearOnError(self.gl, error, texture, callback);
+ });
+ } else {
+ clearOnError(self.gl, xhr.statusText, texture, callback);
+ }
+ }, false);
+ xhr.open('GET', src, true);
+ xhr.responseType = 'arraybuffer';
+ xhr.send(null);
+
+ return texture;
+ }
+
+ // Loads a PVR file into the given texture.
+ // If no texture is provided one is created and returned.
+ TextureLoader.prototype.loadPVR = function(src, texture, callback) {
+ var self = this;
+ if(!texture) {
+ texture = this.gl.createTexture();
+ }
+
+ // Load the file via XHR.
+ var xhr = new XMLHttpRequest();
+ xhr.addEventListener('load', function (ev) {
+ if (xhr.status == 200) {
+ // If the file loaded successfully, parse it.
+ parsePVR(xhr.response, function(dxtData, width, height, levels, internalFormat) {
+ if (!self._formatSupported(internalFormat)) {
+ clearOnError(self.gl, "Texture format not supported", texture, callback);
+ return;
+ }
+ // Upload the parsed PVR data to the texture.
+ self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
+ }, function(error) {
+ clearOnError(self.gl, error, texture, callback);
+ });
+ } else {
+ clearOnError(self.gl, xhr.statusText, texture, callback);
+ }
+ }, false);
+ xhr.open('GET', src, true);
+ xhr.responseType = 'arraybuffer';
+ xhr.send(null);
+
+ return texture;
+ }
+
+ // Loads a texture from a file. Guesses the type based on extension.
+ // If no texture is provided one is created and returned.
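+ // Example (a minimal sketch mirroring how the samples below use this class):
+ //   var loader = new TextureLoader(gl);
+ //   var texture = loader.loadTexture("media/textures/cube-sea.png");
+ // The callback argument is optional; the texture is populated asynchronously.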
+ TextureLoader.prototype.loadTexture = function(src, texture, callback) {
+ // Shamelessly lifted from StackOverflow :)
+ // http://stackoverflow.com/questions/680929
+ var re = /(?:\.([^.]+))?$/;
+ var ext = re.exec(src)[1] || '';
+ ext = ext.toLowerCase();
+
+ switch(ext) {
+ case 'dds':
+ return this.loadDDS(src, texture, callback);
+ case 'pvr':
+ return this.loadPVR(src, texture, callback);
+ default:
+ return this.loadIMG(src, texture, callback);
+ }
+ }
+
+ // Sets a texture to a solid RGBA color
+ // If no texture is provided one is created and returned.
+ TextureLoader.prototype.makeSolidColor = function(r, g, b, a, texture) {
+ var gl = this.gl;
+ var data = new Uint8Array([r, g, b, a]);
+ if(!texture) {
+ texture = gl.createTexture();
+ }
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
+ return texture;
+ }
+ }
+
+ return TextureLoader;
+})();
diff --git a/tests/html/webvr/js/third-party/wglu/wglu-url.js b/tests/html/webvr/js/third-party/wglu/wglu-url.js
new file mode 100644
index 00000000000..43be65831bd
--- /dev/null
+++ b/tests/html/webvr/js/third-party/wglu/wglu-url.js
@@ -0,0 +1,94 @@
+/*
+Copyright (c) 2015, Brandon Jones.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+/*
+Provides a simple way to get values from the query string if they're present
+and use a default value if not. Not strictly a "WebGL" utility, but I use it
+frequently enough for debugging that I wanted to include it here.
+
+Example:
+For the URL http://example.com/index.html?particleCount=1000
+
+WGLUUrl.getInt("particleCount", 100); // URL overrides, returns 1000
+WGLUUrl.getInt("particleSize", 10); // Not in URL, returns default of 10
+*/
+var WGLUUrl = (function() {
+
+ "use strict";
+
+ var urlArgs = null;
+
+ function ensureArgsCached() {
+ if (!urlArgs) {
+ urlArgs = {};
+ var query = window.location.search.substring(1);
+ var vars = query.split("&");
+ for (var i = 0; i < vars.length; i++) {
+ var pair = vars[i].split("=");
+ urlArgs[pair[0].toLowerCase()] = unescape(pair[1]);
+ }
+ }
+ }
+
+ function getString(name, defaultValue) {
+ ensureArgsCached();
+ var lcaseName = name.toLowerCase();
+ if (lcaseName in urlArgs) {
+ return urlArgs[lcaseName];
+ }
+ return defaultValue;
+ }
+
+ function getInt(name, defaultValue) {
+ ensureArgsCached();
+ var lcaseName = name.toLowerCase();
+ if (lcaseName in urlArgs) {
+ return parseInt(urlArgs[lcaseName], 10);
+ }
+ return defaultValue;
+ }
+
+ function getFloat(name, defaultValue) {
+ ensureArgsCached();
+ var lcaseName = name.toLowerCase();
+ if (lcaseName in urlArgs) {
+ return parseFloat(urlArgs[lcaseName]);
+ }
+ return defaultValue;
+ }
+
+ function getBool(name, defaultValue) {
+ ensureArgsCached();
+ var lcaseName = name.toLowerCase();
+ if (lcaseName in urlArgs) {
+ return parseInt(urlArgs[lcaseName], 10) != 0;
+ }
+ return defaultValue;
+ }
+
+ return {
+ getString: getString,
+ getInt: getInt,
+ getFloat: getFloat,
+ getBool: getBool
+ };
+})();
diff --git a/tests/html/webvr/js/vr-audio-panner.js b/tests/html/webvr/js/vr-audio-panner.js
new file mode 100644
index 00000000000..292d1cc366a
--- /dev/null
+++ b/tests/html/webvr/js/vr-audio-panner.js
@@ -0,0 +1,284 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+(function (VRAudioPanner) {
+
+ 'use strict';
+
+ // Default settings for panning. Cone parameters are experimentally
+ // determined.
+ var _PANNING_MODEL = 'HRTF';
+ var _DISTANCE_MODEL = 'inverse';
+ var _CONE_INNER_ANGLE = 60;
+ var _CONE_OUTER_ANGLE = 120;
+ var _CONE_OUTER_GAIN = 0.25;
+
+ // Super-simple web audio version detection.
+ var _LEGACY_WEBAUDIO = window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext');
+ if (_LEGACY_WEBAUDIO)
+ console.log('[VRAudioPanner] outdated version of Web Audio API detected.');
+
+ // Master audio context.
+ var _context = _LEGACY_WEBAUDIO ? new webkitAudioContext() : new AudioContext();
+
+
+ /**
+ * A buffer source player with HRTF panning for testing purpose.
+ * @param {Object} options Default options.
+ * @param {Number} options.gain Sound object gain. (0.0~1.0)
+ * @param {AudioBuffer} options.buffer AudioBuffer to play.
+ * @param {Number} options.detune Detune parameter. (cent)
+ * @param {Array} options.position x, y, z position in an array.
+ * @param {Array} options.orientation x, y, z orientation in an array.
+ */
+ function TestSource (options) {
+
+ this._src = _context.createBufferSource();
+ this._out = _context.createGain();
+ this._panner = _context.createPanner();
+ this._analyser = _context.createAnalyser();
+
+ this._src.connect(this._out);
+ this._out.connect(this._analyser);
+ this._analyser.connect(this._panner);
+ this._panner.connect(_context.destination);
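+ // Resulting graph: buffer source -> gain -> analyser -> HRTF panner -> destination.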
+
+ this._src.buffer = options.buffer;
+ this._src.loop = true;
+ this._out.gain.value = options.gain;
+
+ this._analyser.fftSize = 1024;
+ this._analyser.smoothingTimeConstant = 0.85;
+ this._lastRMSdB = 0.0;
+
+ this._panner.panningModel = _PANNING_MODEL;
+ this._panner.distanceModel = _DISTANCE_MODEL;
+ this._panner.coneInnerAngle = _CONE_INNER_ANGLE;
+ this._panner.coneOuterAngle = _CONE_OUTER_ANGLE;
+ this._panner.coneOuterGain = _CONE_OUTER_GAIN;
+
+ this._position = [0, 0, 0];
+ this._orientation = [1, 0, 0];
+
+ this._analyserBuffer = new Uint8Array(this._analyser.fftSize);
+
+ if (!_LEGACY_WEBAUDIO) {
+ this._src.detune.value = (options.detune || 0);
+ this._analyserBuffer = new Float32Array(this._analyser.fftSize);
+ }
+
+ this.setPosition(options.position);
+ this.setOrientation(options.orientation);
+
+ };
+
+ TestSource.prototype.start = function () {
+ this._src.start(0);
+ };
+
+ TestSource.prototype.stop = function () {
+ this._src.stop(0);
+ };
+
+ TestSource.prototype.getPosition = function () {
+ return this._position;
+ };
+
+ TestSource.prototype.setPosition = function (position) {
+ if (position) {
+ this._position[0] = position[0];
+ this._position[1] = position[1];
+ this._position[2] = position[2];
+ }
+
+ this._panner.setPosition.apply(this._panner, this._position);
+ };
+
+ TestSource.prototype.getOrientation = function () {
+ return this._orientation;
+ };
+
+ TestSource.prototype.setOrientation = function (orientation) {
+ if (orientation) {
+ this._orientation[0] = orientation[0];
+ this._orientation[1] = orientation[1];
+ this._orientation[2] = orientation[2];
+ }
+
+ this._panner.setOrientation.apply(this._panner, this._orientation);
+ };
+
+ TestSource.prototype.getCubeScale = function () {
+ // Safari does not support getFloatTimeDomainData(), so fall back to the
+ // naive spectral energy sum. This is relatively expensive.
+ if (_LEGACY_WEBAUDIO) {
+ this._analyser.getByteFrequencyData(this._analyserBuffer);
+
+ for (var k = 0, total = 0; k < this._analyserBuffer.length; ++k)
+ total += this._analyserBuffer[k];
+ total /= this._analyserBuffer.length;
+
+ return (total / 256.0) * 1.5;
+ }
+
+ this._analyser.getFloatTimeDomainData(this._analyserBuffer);
+ for (var i = 0, sum = 0; i < this._analyserBuffer.length; ++i)
+ sum += this._analyserBuffer[i] * this._analyserBuffer[i];
+
+ // Calculate RMS and convert it to DB for perceptual loudness.
+ var rms = Math.sqrt(sum / this._analyserBuffer.length);
+ var db = 30 + 10 / Math.LN10 * Math.log(rms <= 0 ? 0.0001 : rms);
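+ // Note: (10 / Math.LN10) * Math.log(rms) is simply 10 * log10(rms); the +30
+ // offset presumably keeps typical values positive ahead of the clamp below.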
+
+ // Moving average with the alpha of 0.525. Experimentally determined.
+ this._lastRMSdB += 0.525 * ((db < 0 ? 0 : db) - this._lastRMSdB);
+
+ // Scaling by 1/30 is also experimentally determined.
+ return this._lastRMSdB / 30.0;
+ };
+
+
+ // Internal helper: load a file into a buffer. (github.com/hoch/spiral)
+ function _loadAudioFile(context, fileInfo, done) {
+ var xhr = new XMLHttpRequest();
+ xhr.open('GET', fileInfo.url);
+ xhr.responseType = 'arraybuffer';
+
+ xhr.onload = function () {
+ if (xhr.status === 200) {
+ context.decodeAudioData(xhr.response,
+ function (buffer) {
+ console.log('[VRAudioPanner] File loaded: ' + fileInfo.url);
+ done(fileInfo.name, buffer);
+ },
+ function (message) {
+ console.log('[VRAudioPanner] Decoding failure: ' + fileInfo.url + ' (' + message + ')');
+ done(fileInfo.name, null);
+ });
+ } else {
+ console.log('[VRAudioPanner] XHR Error: ' + fileInfo.url + ' (' + xhr.statusText + ')');
+ done(fileInfo.name, null);
+ }
+ };
+
+ xhr.onerror = function (event) {
+ console.log('[VRAudioPanner] XHR Network failure: ' + fileInfo.url);
+ done(fileInfo.name, null);
+ };
+
+ xhr.send();
+ }
+
+
+ /**
+ * A wrapper/container class for multiple file loaders.
+ * @param {Object} context AudioContext
+ * @param {Object} audioFileData Audio file info in the format of {name, url}
+ * @param {Function} resolve Resolution handler for promise.
+ * @param {Function} reject Rejection handler for promise.
+ * @param {Function} progress Progress event handler.
+ */
+ function AudioBufferManager(context, audioFileData, resolve, reject, progress) {
+ this._context = context;
+ this._resolve = resolve;
+ this._reject = reject;
+ this._progress = progress;
+
+ this._buffers = new Map();
+ this._loadingTasks = {};
+
+ // Iterate over the files and kick off loading for each one.
+ for (var i = 0; i < audioFileData.length; i++) {
+ var fileInfo = audioFileData[i];
+
+ // Check for duplicate filenames and quit if one is found.
+ if (this._loadingTasks.hasOwnProperty(fileInfo.name)) {
+ console.log('[VRAudioPanner] Duplicated filename in AudioBufferManager: ' + fileInfo.name);
+ return;
+ }
+
+ // Mark it as pending (0)
+ this._loadingTasks[fileInfo.name] = 0;
+ _loadAudioFile(this._context, fileInfo, this._done.bind(this));
+ }
+ }
+
+ AudioBufferManager.prototype._done = function (filename, buffer) {
+ // Label the loading task.
+ this._loadingTasks[filename] = buffer !== null ? 'loaded' : 'failed';
+
+ // A failed task will be a null buffer.
+ this._buffers.set(filename, buffer);
+
+ this._updateProgress(filename);
+ };
+
+ AudioBufferManager.prototype._updateProgress = function (filename) {
+ var numberOfFinishedTasks = 0, numberOfFailedTask = 0;
+ var numberOfTasks = 0;
+
+ for (var task in this._loadingTasks) {
+ numberOfTasks++;
+ if (this._loadingTasks[task] === 'loaded')
+ numberOfFinishedTasks++;
+ else if (this._loadingTasks[task] === 'failed')
+ numberOfFailedTask++;
+ }
+
+ if (typeof this._progress === 'function')
+ this._progress(filename, numberOfFinishedTasks, numberOfTasks);
+
+ if (numberOfFinishedTasks === numberOfTasks)
+ this._resolve(this._buffers);
+
+ if (numberOfFinishedTasks + numberOfFailedTask === numberOfTasks)
+ this._reject(this._buffers);
+ };
+
+ /**
+ * Returns true if the web audio implementation is outdated.
+ * @return {Boolean}
+ */
+ VRAudioPanner.isWebAudioOutdated = function () {
+ return _LEGACY_WEBAUDIO;
+ }
+
+ /**
+ * Static method for updating listener's position.
+ * @param {Array} position Listener position in x, y, z.
+ */
+ VRAudioPanner.setListenerPosition = function (position) {
+ _context.listener.setPosition.apply(_context.listener, position);
+ };
+
+ /**
+ * Static method for updating listener's orientation.
+ * @param {Array} orientation Listener orientation in x, y, z.
+ * @param {Array} upvector Listener's up vector in x, y, z.
+ */
+ VRAudioPanner.setListenerOrientation = function (orientation, upvector) {
+ _context.listener.setOrientation(
+ orientation[0], orientation[1], orientation[2],
+ upvector[0], upvector[1], upvector[2]);
+ };
+
+ /**
+ * Load audio files asynchronously.
+ * @param {Array} dataModel Audio file info in the format of {name, url}
+ * @param {Function} onprogress Callback function for reporting the progress.
+ * @return {Promise} Promise.
+ */
+ VRAudioPanner.loadAudioFiles = function (dataModel, onprogress) {
+ return new Promise(function (resolve, reject) {
+ new AudioBufferManager(_context, dataModel, resolve, reject, onprogress);
+ });
+ };
+
+ /**
+ * Create a source player. See TestSource class for parameter description.
+ * @return {TestSource}
+ */
+ VRAudioPanner.createTestSource = function (options) {
+ return new TestSource(options);
+ };
+
+})(VRAudioPanner = {});
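+// Example usage (a minimal sketch; the file name and URL are placeholders):
+//
+//   VRAudioPanner.loadAudioFiles([{ name: 'drums', url: 'media/sound/drums.wav' }])
+//     .then(function (buffers) {
+//       var source = VRAudioPanner.createTestSource({
+//         buffer: buffers.get('drums'),
+//         gain: 0.8,
+//         position: [0, 0, -1],
+//         orientation: [0, 0, 1]
+//       });
+//       source.start();
+//     });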
diff --git a/tests/html/webvr/js/vr-cube-island.js b/tests/html/webvr/js/vr-cube-island.js
new file mode 100644
index 00000000000..e21a10e11bd
--- /dev/null
+++ b/tests/html/webvr/js/vr-cube-island.js
@@ -0,0 +1,210 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/* global mat4, WGLUProgram */
+
+/*
+Like CubeSea, but designed around a user's physical space. One central platform
+that maps to the user's play area and several floating cubes that sit just
+outside those boundaries (just to add visual interest).
+*/
+window.VRCubeIsland = (function () {
+ "use strict";
+
+ var cubeIslandVS = [
+ "uniform mat4 projectionMat;",
+ "uniform mat4 modelViewMat;",
+ "attribute vec3 position;",
+ "attribute vec2 texCoord;",
+ "varying vec2 vTexCoord;",
+
+ "void main() {",
+ " vTexCoord = texCoord;",
+ " gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
+ "}",
+ ].join("\n");
+
+ var cubeIslandFS = [
+ "precision mediump float;",
+ "uniform sampler2D diffuse;",
+ "varying vec2 vTexCoord;",
+
+ "void main() {",
+ " gl_FragColor = texture2D(diffuse, vTexCoord);",
+ "}",
+ ].join("\n");
+
+ var CubeIsland = function (gl, texture, width, depth) {
+ this.gl = gl;
+
+ this.statsMat = mat4.create();
+
+ this.texture = texture;
+
+ this.program = new WGLUProgram(gl);
+ this.program.attachShaderSource(cubeIslandVS, gl.VERTEX_SHADER);
+ this.program.attachShaderSource(cubeIslandFS, gl.FRAGMENT_SHADER);
+ this.program.bindAttribLocation({
+ position: 0,
+ texCoord: 1
+ });
+ this.program.link();
+
+ this.vertBuffer = gl.createBuffer();
+ this.indexBuffer = gl.createBuffer();
+
+ this.resize(width, depth);
+ };
+
+ CubeIsland.prototype.resize = function (width, depth) {
+ var gl = this.gl;
+
+ this.width = width;
+ this.depth = depth;
+
+ var cubeVerts = [];
+ var cubeIndices = [];
+
+ // Build a single box.
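+ // Each vertex is packed as five floats (x, y, z, u, v), so new indices are
+ // derived from cubeVerts.length / 5 and the draw calls use a 20-byte stride.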
+ function appendBox (left, bottom, back, right, top, front) {
+ // Bottom
+ var idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 1, idx + 2);
+ cubeIndices.push(idx, idx + 2, idx + 3);
+
+ cubeVerts.push(left, bottom, back, 0.0, 1.0);
+ cubeVerts.push(right, bottom, back, 1.0, 1.0);
+ cubeVerts.push(right, bottom, front, 1.0, 0.0);
+ cubeVerts.push(left, bottom, front, 0.0, 0.0);
+
+ // Top
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 2, idx + 1);
+ cubeIndices.push(idx, idx + 3, idx + 2);
+
+ cubeVerts.push(left, top, back, 0.0, 0.0);
+ cubeVerts.push(right, top, back, 1.0, 0.0);
+ cubeVerts.push(right, top, front, 1.0, 1.0);
+ cubeVerts.push(left, top, front, 0.0, 1.0);
+
+ // Left
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 2, idx + 1);
+ cubeIndices.push(idx, idx + 3, idx + 2);
+
+ cubeVerts.push(left, bottom, back, 0.0, 1.0);
+ cubeVerts.push(left, top, back, 0.0, 0.0);
+ cubeVerts.push(left, top, front, 1.0, 0.0);
+ cubeVerts.push(left, bottom, front, 1.0, 1.0);
+
+ // Right
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 1, idx + 2);
+ cubeIndices.push(idx, idx + 2, idx + 3);
+
+ cubeVerts.push(right, bottom, back, 1.0, 1.0);
+ cubeVerts.push(right, top, back, 1.0, 0.0);
+ cubeVerts.push(right, top, front, 0.0, 0.0);
+ cubeVerts.push(right, bottom, front, 0.0, 1.0);
+
+ // Back
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 2, idx + 1);
+ cubeIndices.push(idx, idx + 3, idx + 2);
+
+ cubeVerts.push(left, bottom, back, 1.0, 1.0);
+ cubeVerts.push(right, bottom, back, 0.0, 1.0);
+ cubeVerts.push(right, top, back, 0.0, 0.0);
+ cubeVerts.push(left, top, back, 1.0, 0.0);
+
+ // Front
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 1, idx + 2);
+ cubeIndices.push(idx, idx + 2, idx + 3);
+
+ cubeVerts.push(left, bottom, front, 0.0, 1.0);
+ cubeVerts.push(right, bottom, front, 1.0, 1.0);
+ cubeVerts.push(right, top, front, 1.0, 0.0);
+ cubeVerts.push(left, top, front, 0.0, 0.0);
+ }
+
+ // Appends a cube with the given centerpoint and size.
+ function appendCube (x, y, z, size) {
+ var halfSize = size * 0.5;
+ appendBox(x - halfSize, y - halfSize, z - halfSize,
+ x + halfSize, y + halfSize, z + halfSize);
+ }
+
+ // Main "island", covers where the user can safely stand. Top of the cube
+ // (the ground the user stands on) should be at Y=0 to align with the user's
+ // floor. X=0 and Z=0 should be at the center of the user's play space.
+ appendBox(-width * 0.5, -width, -depth * 0.5, width * 0.5, 0, depth * 0.5);
+
+ // A sprinkling of other cubes to make things more visually interesting.
+ appendCube(1.1, 0.3, (-depth * 0.5) - 0.8, 0.5);
+ appendCube(-0.5, 1.0, (-depth * 0.5) - 0.9, 0.75);
+ appendCube(0.6, 1.5, (-depth * 0.5) - 0.6, 0.4);
+ appendCube(-1.0, 0.5, (-depth * 0.5) - 0.5, 0.2);
+
+ appendCube((-width * 0.5) - 0.8, 0.3, -1.1, 0.5);
+ appendCube((-width * 0.5) - 0.9, 1.0, 0.5, 0.75);
+ appendCube((-width * 0.5) - 0.6, 1.5, -0.6, 0.4);
+ appendCube((-width * 0.5) - 0.5, 0.5, 1.0, 0.2);
+
+ appendCube((width * 0.5) + 0.8, 0.3, 1.1, 0.5);
+ appendCube((width * 0.5) + 0.9, 1.0, -0.5, 0.75);
+ appendCube((width * 0.5) + 0.6, 1.5, 0.6, 0.4);
+ appendCube((width * 0.5) + 0.5, 0.5, -1.0, 0.2);
+
+ appendCube(1.1, 1.4, (depth * 0.5) + 0.8, 0.5);
+ appendCube(-0.5, 1.0, (depth * 0.5) + 0.9, 0.75);
+ appendCube(0.6, 0.4, (depth * 0.5) + 0.6, 0.4);
+
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
+
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
+
+ this.indexCount = cubeIndices.length;
+ };
+
+ CubeIsland.prototype.render = function (projectionMat, modelViewMat, stats) {
+ var gl = this.gl;
+ var program = this.program;
+
+ program.use();
+
+ gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
+ gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+
+ gl.enableVertexAttribArray(program.attrib.position);
+ gl.enableVertexAttribArray(program.attrib.texCoord);
+
+ gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
+ gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.uniform1i(this.program.uniform.diffuse, 0);
+ gl.bindTexture(gl.TEXTURE_2D, this.texture);
+
+ gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
+
+ if (stats) {
+ // To ensure that the FPS counter is visible in VR mode we have to
+ // render it as part of the scene.
+ mat4.fromTranslation(this.statsMat, [0, 1.5, -this.depth * 0.5]);
+ mat4.scale(this.statsMat, this.statsMat, [0.5, 0.5, 0.5]);
+ mat4.rotateX(this.statsMat, this.statsMat, -0.75);
+ mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
+ stats.render(projectionMat, this.statsMat);
+ }
+ };
+
+ return CubeIsland;
+})();
diff --git a/tests/html/webvr/js/vr-cube-sea.js b/tests/html/webvr/js/vr-cube-sea.js
new file mode 100644
index 00000000000..5002e181639
--- /dev/null
+++ b/tests/html/webvr/js/vr-cube-sea.js
@@ -0,0 +1,188 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/* global mat4, WGLUProgram */
+
+window.VRCubeSea = (function () {
+ "use strict";
+
+ var cubeSeaVS = [
+ "uniform mat4 projectionMat;",
+ "uniform mat4 modelViewMat;",
+ "attribute vec3 position;",
+ "attribute vec2 texCoord;",
+ "varying vec2 vTexCoord;",
+
+ "void main() {",
+ " vTexCoord = texCoord;",
+ " gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
+ "}",
+ ].join("\n");
+
+ var cubeSeaFS = [
+ "precision mediump float;",
+ "uniform sampler2D diffuse;",
+ "varying vec2 vTexCoord;",
+
+ "void main() {",
+ " gl_FragColor = texture2D(diffuse, vTexCoord);",
+ "}",
+ ].join("\n");
+
+ var CubeSea = function (gl, texture) {
+ this.gl = gl;
+
+ this.statsMat = mat4.create();
+
+ this.texture = texture;
+
+ this.program = new WGLUProgram(gl);
+ this.program.attachShaderSource(cubeSeaVS, gl.VERTEX_SHADER);
+ this.program.attachShaderSource(cubeSeaFS, gl.FRAGMENT_SHADER);
+ this.program.bindAttribLocation({
+ position: 0,
+ texCoord: 1
+ });
+ this.program.link();
+
+ var cubeVerts = [];
+ var cubeIndices = [];
+
+ // Build a single cube.
+ function appendCube (x, y, z) {
+ if (!x && !y && !z) {
+ // Don't create a cube in the center.
+ return;
+ }
+
+ var size = 0.2;
+ // Bottom
+ var idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 1, idx + 2);
+ cubeIndices.push(idx, idx + 2, idx + 3);
+
+ cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
+ cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
+ cubeVerts.push(x + size, y - size, z + size, 1.0, 0.0);
+ cubeVerts.push(x - size, y - size, z + size, 0.0, 0.0);
+
+ // Top
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 2, idx + 1);
+ cubeIndices.push(idx, idx + 3, idx + 2);
+
+ cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
+ cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
+ cubeVerts.push(x + size, y + size, z + size, 1.0, 1.0);
+ cubeVerts.push(x - size, y + size, z + size, 0.0, 1.0);
+
+ // Left
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 2, idx + 1);
+ cubeIndices.push(idx, idx + 3, idx + 2);
+
+ cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0);
+ cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0);
+ cubeVerts.push(x - size, y + size, z + size, 1.0, 0.0);
+ cubeVerts.push(x - size, y - size, z + size, 1.0, 1.0);
+
+ // Right
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 1, idx + 2);
+ cubeIndices.push(idx, idx + 2, idx + 3);
+
+ cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0);
+ cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0);
+ cubeVerts.push(x + size, y + size, z + size, 0.0, 0.0);
+ cubeVerts.push(x + size, y - size, z + size, 0.0, 1.0);
+
+ // Back
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 2, idx + 1);
+ cubeIndices.push(idx, idx + 3, idx + 2);
+
+ cubeVerts.push(x - size, y - size, z - size, 1.0, 1.0);
+ cubeVerts.push(x + size, y - size, z - size, 0.0, 1.0);
+ cubeVerts.push(x + size, y + size, z - size, 0.0, 0.0);
+ cubeVerts.push(x - size, y + size, z - size, 1.0, 0.0);
+
+ // Front
+ idx = cubeVerts.length / 5.0;
+ cubeIndices.push(idx, idx + 1, idx + 2);
+ cubeIndices.push(idx, idx + 2, idx + 3);
+
+ cubeVerts.push(x - size, y - size, z + size, 0.0, 1.0);
+ cubeVerts.push(x + size, y - size, z + size, 1.0, 1.0);
+ cubeVerts.push(x + size, y + size, z + size, 1.0, 0.0);
+ cubeVerts.push(x - size, y + size, z + size, 0.0, 0.0);
+ }
+
+ var gridSize = 10;
+
+ // Build the cube sea
+ for (var x = 0; x < gridSize; ++x) {
+ for (var y = 0; y < gridSize; ++y) {
+ for (var z = 0; z < gridSize; ++z) {
+ appendCube(x - (gridSize / 2), y - (gridSize / 2), z - (gridSize / 2));
+ }
+ }
+ }
+
+ this.vertBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
+
+ this.indexBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
+
+ this.indexCount = cubeIndices.length;
+ };
+
+ var mortimer = mat4.create();
+
+ var a = [0.9868122935295105, -0.03754837438464165, -0.15745431184768677, 0, 0.011360996402800083, 0.9863911271095276, -0.1640235036611557, 0, 0.16147033870220184, 0.16007155179977417, 0.9738093614578247, 0, 0.192538782954216, 0.024526841938495636, -0.001076754298992455, 1.0000001192092896];
+ for (var i = 0; i < 16; ++i) {
+ mortimer[i] = a[i];
+ }
+
+ CubeSea.prototype.render = function (projectionMat, modelViewMat, stats) {
+ var gl = this.gl;
+ var program = this.program;
+
+ //mat4.invert(mortimer, modelViewMat);
+
+ program.use();
+
+ gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
+ gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+
+ gl.enableVertexAttribArray(program.attrib.position);
+ gl.enableVertexAttribArray(program.attrib.texCoord);
+
+ gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
+ gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.uniform1i(this.program.uniform.diffuse, 0);
+ gl.bindTexture(gl.TEXTURE_2D, this.texture);
+
+ gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
+
+ if (stats) {
+ // To ensure that the FPS counter is visible in VR mode we have to
+ // render it as part of the scene.
+ mat4.fromTranslation(this.statsMat, [0, -0.3, -0.5]);
+ mat4.scale(this.statsMat, this.statsMat, [0.3, 0.3, 0.3]);
+ mat4.rotateX(this.statsMat, this.statsMat, -0.75);
+ mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
+ stats.render(projectionMat, this.statsMat);
+ }
+ };
+
+ return CubeSea;
+})();
diff --git a/tests/html/webvr/js/vr-panorama.js b/tests/html/webvr/js/vr-panorama.js
new file mode 100644
index 00000000000..8eac81e9f05
--- /dev/null
+++ b/tests/html/webvr/js/vr-panorama.js
@@ -0,0 +1,219 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/* global mat4, WGLUProgram */
+
+window.VRPanorama = (function () {
+ "use strict";
+
+ var panoVS = [
+ "uniform mat4 projectionMat;",
+ "uniform mat4 modelViewMat;",
+ "attribute vec3 position;",
+ "attribute vec2 texCoord;",
+ "varying vec2 vTexCoord;",
+
+ "void main() {",
+ " vTexCoord = texCoord;",
+ " gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
+ "}",
+ ].join("\n");
+
+ var panoFS = [
+ "precision mediump float;",
+ "uniform sampler2D diffuse;",
+ "varying vec2 vTexCoord;",
+
+ "void main() {",
+ " gl_FragColor = texture2D(diffuse, vTexCoord);",
+ "}",
+ ].join("\n");
+
+ var Panorama = function (gl) {
+ this.gl = gl;
+
+ this.texture = gl.createTexture();
+
+ this.program = new WGLUProgram(gl);
+ this.program.attachShaderSource(panoVS, gl.VERTEX_SHADER);
+ this.program.attachShaderSource(panoFS, gl.FRAGMENT_SHADER);
+ this.program.bindAttribLocation({
+ position: 0,
+ texCoord: 1
+ });
+ this.program.link();
+
+ var panoVerts = [];
+ var panoIndices = [];
+
+ var radius = 2; // 2 meter radius sphere
+ var latSegments = 40;
+ var lonSegments = 40;
+
+ // Create the vertices
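+ // (u, v) map longitude and latitude linearly onto [0, 1], matching an
+ // equirectangular panorama image or video frame.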
+ for (var i=0; i <= latSegments; ++i) {
+ var theta = i * Math.PI / latSegments;
+ var sinTheta = Math.sin(theta);
+ var cosTheta = Math.cos(theta);
+
+ for (var j=0; j <= lonSegments; ++j) {
+ var phi = j * 2 * Math.PI / lonSegments;
+ var sinPhi = Math.sin(phi);
+ var cosPhi = Math.cos(phi);
+
+ var x = sinPhi * sinTheta;
+ var y = cosTheta;
+ var z = -cosPhi * sinTheta;
+ var u = (j / lonSegments);
+ var v = (i / latSegments);
+
+ panoVerts.push(x * radius, y * radius, z * radius, u, v);
+ }
+ }
+
+ // Create the indices
+ for (var i = 0; i < latSegments; ++i) {
+ var offset0 = i * (lonSegments+1);
+ var offset1 = (i+1) * (lonSegments+1);
+ for (var j = 0; j < lonSegments; ++j) {
+ var index0 = offset0+j;
+ var index1 = offset1+j;
+ panoIndices.push(
+ index0, index1, index0+1,
+ index1, index1+1, index0+1
+ );
+ }
+ }
+
+ this.vertBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(panoVerts), gl.STATIC_DRAW);
+
+ this.indexBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(panoIndices), gl.STATIC_DRAW);
+
+ this.indexCount = panoIndices.length;
+
+ this.imgElement = null;
+ this.videoElement = null;
+ };
+
+ Panorama.prototype.setImage = function (url) {
+ var gl = this.gl;
+ var self = this;
+
+ return new Promise(function(resolve, reject) {
+ var img = new Image();
+ img.addEventListener('load', function() {
+ self.imgElement = img;
+ self.videoElement = null;
+
+ gl.bindTexture(gl.TEXTURE_2D, self.texture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, img);
+
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+
+ resolve(self.imgElement);
+ });
+ img.addEventListener('error', function(ev) {
+ console.error(ev.message);
+ reject(ev.message);
+ }, false);
+ img.crossOrigin = 'anonymous';
+ img.src = url;
+ });
+ };
+
+ Panorama.prototype.setVideo = function (url) {
+ var gl = this.gl;
+ var self = this;
+
+ return new Promise(function(resolve, reject) {
+ var video = document.createElement('video');
+ video.addEventListener('canplay', function() {
+ // TODO: Add a "click to play" UI?
+ });
+
+ video.addEventListener('playing', function() {
+ self.videoElement = video;
+ self.imgElement = null;
+
+ gl.bindTexture(gl.TEXTURE_2D, self.texture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, self.videoElement);
+
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+
+ resolve(self.videoElement);
+ });
+
+ video.addEventListener('error', function(ev) {
+ console.error(video.error);
+ reject(video.error);
+ }, false);
+
+ video.loop = true;
+ video.autoplay = true;
+ video.crossOrigin = 'anonymous';
+ video.setAttribute('webkit-playsinline', '');
+ video.src = url;
+ });
+ };
+
+ Panorama.prototype.play = function() {
+ if (this.videoElement)
+ this.videoElement.play();
+ };
+
+ Panorama.prototype.pause = function() {
+ if (this.videoElement)
+ this.videoElement.pause();
+ };
+
+ Panorama.prototype.isPaused = function() {
+ if (this.videoElement)
+ return this.videoElement.paused;
+ return false;
+ };
+
+ Panorama.prototype.render = function (projectionMat, modelViewMat) {
+ var gl = this.gl;
+ var program = this.program;
+
+ if (!this.imgElement && !this.videoElement)
+ return;
+
+ program.use();
+
+ gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
+ gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
+
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
+
+ gl.enableVertexAttribArray(program.attrib.position);
+ gl.enableVertexAttribArray(program.attrib.texCoord);
+
+ gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
+ gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.uniform1i(this.program.uniform.diffuse, 0);
+ gl.bindTexture(gl.TEXTURE_2D, this.texture);
+
+ if (this.videoElement && !this.videoElement.paused) {
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.videoElement);
+ }
+
+ gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
+ };
+
+ return Panorama;
+})();
diff --git a/tests/html/webvr/js/vr-samples-util.js b/tests/html/webvr/js/vr-samples-util.js
new file mode 100644
index 00000000000..20553bdf870
--- /dev/null
+++ b/tests/html/webvr/js/vr-samples-util.js
@@ -0,0 +1,181 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+window.VRSamplesUtil = (function () {
+
+ "use strict";
+
+ // Lifted from the WebVR Polyfill
+ function isMobile () {
+ return /Android/i.test(navigator.userAgent) ||
+ /iPhone|iPad|iPod/i.test(navigator.userAgent);
+ }
+
+ function getMessageContainer () {
+ var messageContainer = document.getElementById("vr-sample-message-container");
+ if (!messageContainer) {
+ messageContainer = document.createElement("div");
+ messageContainer.id = "vr-sample-message-container";
+ messageContainer.style.fontFamily = "sans-serif";
+ messageContainer.style.position = "absolute";
+ messageContainer.style.zIndex = "999";
+ messageContainer.style.left = "0";
+ messageContainer.style.top = "0";
+ messageContainer.style.right = "0";
+ messageContainer.style.margin = "0";
+ messageContainer.style.padding = "0";
+ messageContainer.align = "center";
+ document.body.appendChild(messageContainer);
+ }
+ return messageContainer;
+ }
+
+ function addMessageElement (message, backgroundColor) {
+ var messageElement = document.createElement("div");
+ messageElement.classList.add("vr-sample-message");
+ messageElement.style.color = "#FFF";
+ messageElement.style.backgroundColor = backgroundColor;
+ messageElement.style.borderRadius = "3px";
+ messageElement.style.position = "relative";
+ messageElement.style.display = "inline-block";
+ messageElement.style.margin = "0.5em";
+ messageElement.style.padding = "0.75em";
+
+ messageElement.innerHTML = message;
+
+ getMessageContainer().appendChild(messageElement);
+
+ return messageElement;
+ }
+
+ // Makes the given element fade out and remove itself from the DOM after the
+ // given timeout.
+ function makeToast (element, timeout) {
+ element.style.transition = "opacity 0.5s ease-in-out";
+ element.style.opacity = "1";
+ setTimeout(function () {
+ element.style.opacity = "0";
+ setTimeout(function () {
+ if (element.parentElement)
+ element.parentElement.removeChild(element);
+ }, 500);
+ }, timeout);
+ }
+
+ function addError (message, timeout) {
+ var element = addMessageElement("<b>ERROR:</b> " + message, "#D33");
+
+ if (timeout) {
+ makeToast(element, timeout);
+ }
+
+ return element;
+ }
+
+ function addInfo (message, timeout) {
+ var element = addMessageElement(message, "#22A");
+
+ if (timeout) {
+ makeToast(element, timeout);
+ }
+
+ return element;
+ }
+
+ function getButtonContainer () {
+ var buttonContainer = document.getElementById("vr-sample-button-container");
+ if (!buttonContainer) {
+ buttonContainer = document.createElement("div");
+ buttonContainer.id = "vr-sample-button-container";
+ buttonContainer.style.fontFamily = "sans-serif";
+ buttonContainer.style.position = "absolute";
+ buttonContainer.style.zIndex = "999";
+ buttonContainer.style.left = "0";
+ buttonContainer.style.bottom = "0";
+ buttonContainer.style.right = "0";
+ buttonContainer.style.margin = "0";
+ buttonContainer.style.padding = "0";
+ buttonContainer.align = "right";
+ document.body.appendChild(buttonContainer);
+ }
+ return buttonContainer;
+ }
+
+ function addButtonElement (message, key, icon) {
+ var buttonElement = document.createElement("div");
+ buttonElement.classList.add("vr-sample-button");
+ buttonElement.style.color = "#FFF";
+ buttonElement.style.fontWeight = "bold";
+ buttonElement.style.backgroundColor = "#888";
+ buttonElement.style.borderRadius = "5px";
+ buttonElement.style.border = "3px solid #555";
+ buttonElement.style.position = "relative";
+ buttonElement.style.display = "inline-block";
+ buttonElement.style.margin = "0.5em";
+ buttonElement.style.padding = "0.75em";
+ buttonElement.style.cursor = "pointer";
+ buttonElement.align = "center";
+
+ if (icon) {
+ buttonElement.innerHTML = "<img src='" + icon + "'/><br/>" + message;
+ } else {
+ buttonElement.innerHTML = message;
+ }
+
+ if (key) {
+ var keyElement = document.createElement("span");
+ keyElement.classList.add("vr-sample-button-accelerator");
+ keyElement.style.fontSize = "0.75em";
+ keyElement.style.fontStyle = "italic";
+ keyElement.innerHTML = " (" + key + ")";
+
+ buttonElement.appendChild(keyElement);
+ }
+
+ getButtonContainer().appendChild(buttonElement);
+
+ return buttonElement;
+ }
+
+ function addButton (message, key, icon, callback) {
+ var keyListener = null;
+ if (key) {
+ var keyCode = key.charCodeAt(0);
+ keyListener = function (event) {
+ if (event.keyCode === keyCode) {
+ callback(event);
+ }
+ };
+ document.addEventListener("keydown", keyListener, false);
+ }
+ var element = addButtonElement(message, key, icon);
+ element.addEventListener("click", function (event) {
+ callback(event);
+ event.preventDefault();
+ }, false);
+
+ return {
+ element: element,
+ keyListener: keyListener
+ };
+ }
+
+ function removeButton (button) {
+ if (!button)
+ return;
+ if (button.element.parentElement)
+ button.element.parentElement.removeChild(button.element);
+ if (button.keyListener)
+ document.removeEventListener("keydown", button.keyListener, false);
+ }
+
+ return {
+ isMobile: isMobile,
+ addError: addError,
+ addInfo: addInfo,
+ addButton: addButton,
+ removeButton: removeButton,
+ makeToast: makeToast
+ };
+})();
diff --git a/tests/html/webvr/media/icons/cardboard64.png b/tests/html/webvr/media/icons/cardboard64.png
new file mode 100644
index 00000000000..9457f7d53ed
--- /dev/null
+++ b/tests/html/webvr/media/icons/cardboard64.png
Binary files differ
diff --git a/tests/html/webvr/media/textures/cube-sea.png b/tests/html/webvr/media/textures/cube-sea.png
new file mode 100644
index 00000000000..356bc3369d3
--- /dev/null
+++ b/tests/html/webvr/media/textures/cube-sea.png
Binary files differ
diff --git a/tests/html/webvr/room-scale.html b/tests/html/webvr/room-scale.html
new file mode 100644
index 00000000000..c39d4f56908
--- /dev/null
+++ b/tests/html/webvr/room-scale.html
@@ -0,0 +1,312 @@
+<!doctype html>
+<!--
+Copyright 2016 The Chromium Authors. All rights reserved.
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+-->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
+ <meta name="mobile-web-app-capable" content="yes">
+ <meta name="apple-mobile-web-app-capable" content="yes">
+
+ <title>05 - Room Scale</title>
+
+ <!--
+ This sample demonstrates how to create scenes that align with the space
+ physically available to the user (when that information is available).
+ -->
+
+ <style>
+ #webgl-canvas {
+ box-sizing: border-box;
+ height: 100%;
+ left: 0;
+ margin: 0;
+ position: absolute;
+ top: 0;
+ width: 100%;
+ }
+ </style>
+
+ <!-- This entire block is only to facilitate dynamically enabling and
+ disabling the WebVR polyfill, and is not necessary for most WebVR apps.
+ If you want to use the polyfill in your app, just include the js file and
+ everything will work the way you want it to by default. -->
+ <script>
+ var WebVRConfig = {
+ // Prevents the polyfill from initializing automatically.
+ DEFER_INITIALIZATION: true,
+ // Polyfill optimizations
+ DIRTY_SUBMIT_FRAME_BINDINGS: true,
+ BUFFER_SCALE: 0.75,
+ };
+ </script>
+ <script src="js/third-party/webvr-polyfill.js"></script>
+ <script src="js/third-party/wglu/wglu-url.js"></script>
+ <script>
+ // Dynamically turn the polyfill on if requested by the query args.
+ if (WGLUUrl.getBool('polyfill', false)) {
+ InitializeWebVRPolyfill();
+ } else {
+ // Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
+ InitializeSpecShim();
+ }
+ </script>
+ <!-- End sample polyfill enabling logic -->
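+ <!-- For example, loading room-scale.html?polyfill=1 forces the polyfill even
+ where native WebVR support is present; omitting the parameter uses the
+ native implementation via the spec shim above. -->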
+
+ <script src="js/third-party/gl-matrix-min.js"></script>
+
+ <script src="js/third-party/wglu/wglu-debug-geometry.js"></script>
+ <script src="js/third-party/wglu/wglu-program.js"></script>
+ <script src="js/third-party/wglu/wglu-stats.js"></script>
+ <script src="js/third-party/wglu/wglu-texture.js"></script>
+
+ <script src="js/vr-cube-island.js"></script>
+ <script src="js/vr-samples-util.js"></script>
+ </head>
+ <body>
+ <canvas id="webgl-canvas"></canvas>
+ <script>
+ /* global mat4, vec3, VRCubeIsland, WGLUDebugGeometry, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
+ (function () {
+ "use strict";
+
+ var PLAYER_HEIGHT = 1.65;
+
+ var vrDisplay = null;
+ var frameData = null;
+ var projectionMat = mat4.create();
+ var viewMat = mat4.create();
+ var vrPresentButton = null;
+
+ // ===================================================
+ // WebGL scene setup. This code is not WebVR specific.
+ // ===================================================
+
+ // WebGL setup.
+ var webglCanvas = document.getElementById("webgl-canvas");
+ var gl = null;
+ var cubeIsland = null;
+ var stats = null;
+ var debugGeom = null;
+
+ function initWebGL (preserveDrawingBuffer) {
+ var glAttribs = {
+ alpha: false,
+ antialias: false, //!VRSamplesUtil.isMobile(),
+ preserveDrawingBuffer: false //preserveDrawingBuffer
+ };
+ gl = webglCanvas.getContext("webgl", glAttribs);
+ if (!gl) {
+ gl = webglCanvas.getContext("experimental-webgl", glAttribs);
+ if (!gl) {
+ VRSamplesUtil.addError("Your browser does not support WebGL.");
+ return;
+ }
+ }
+ gl.clearColor(0.1, 0.2, 0.3, 1.0);
+ gl.enable(gl.DEPTH_TEST);
+ gl.enable(gl.CULL_FACE);
+
+ var textureLoader = new WGLUTextureLoader(gl);
+ var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
+
+ // If the VRDisplay doesn't have stageParameters we won't know
+ // how big the user's play space is. Construct a scene around a
+ // default space size of 2 meters by 2 meters as a placeholder.
+ cubeIsland = new VRCubeIsland(gl, texture, 2, 2);
+
+ stats = new WGLUStats(gl);
+ debugGeom = new WGLUDebugGeometry(gl);
+
+ // Wait until we have a WebGL context to resize and start rendering.
+ window.addEventListener("resize", onResize, false);
+ onResize();
+ window.requestAnimationFrame(onAnimationFrame);
+ }
+
+ // ================================
+ // WebVR-specific code begins here.
+ // ================================
+
+ function onVRRequestPresent () {
+ vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
+ }, function () {
+ VRSamplesUtil.addError("requestPresent failed.", 2000);
+ });
+ }
+
+ function onVRExitPresent () {
+ if (!vrDisplay.isPresenting)
+ return;
+
+ vrDisplay.exitPresent().then(function () {
+ }, function () {
+ VRSamplesUtil.addError("exitPresent failed.", 2000);
+ });
+ }
+
+ function onVRPresentChange () {
+ onResize();
+
+ if (vrDisplay.isPresenting) {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
+ }
+ } else {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+ }
+ }
+ }
+
+ if (navigator.vr) {
+ frameData = new VRFrameData();
+
+ navigator.vr.getDisplays().then(function (displays) {
+ if (displays.length > 0) {
+ vrDisplay = displays[0];
+ vrDisplay.depthNear = 0.1;
+ vrDisplay.depthFar = 1024.0;
+
+ initWebGL(true);
+
+ if (vrDisplay.stageParameters &&
+ vrDisplay.stageParameters.sizeX > 0 &&
+ vrDisplay.stageParameters.sizeZ > 0) {
+ // If we have stageParameters with a valid size use that to resize
+ // our scene to match the user's available space more closely. The
+ // check for size > 0 is necessary because some devices, like the
+ // Oculus Rift, can give you a standing space coordinate but don't
+ // have a configured play area. These devices will return a stage
+ // size of 0.
+ cubeIsland.resize(vrDisplay.stageParameters.sizeX, vrDisplay.stageParameters.sizeZ);
+ } else {
+ if (vrDisplay.stageParameters) {
+ VRSamplesUtil.addInfo("VRDisplay reported stageParameters, but stage size was 0. Using default size.", 3000);
+ } else {
+ VRSamplesUtil.addInfo("VRDisplay did not report stageParameters", 3000);
+ }
+ }
+
+ VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
+
+ if (vrDisplay.capabilities.canPresent)
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+
+ vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
+ //vrDisplay.addEventListener('activate', onVRRequestPresent, false);
+ //vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
+ } else {
+ initWebGL(false);
+ VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
+ }
+ });
+ } else if (navigator.getVRDevices) {
+ initWebGL(false);
+ VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
+ } else {
+ initWebGL(false);
+ VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
+ }
+
+ function onResize () {
+ if (vrDisplay && vrDisplay.isPresenting) {
+ var leftEye = vrDisplay.getEyeParameters("left");
+ var rightEye = vrDisplay.getEyeParameters("right");
+
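+ // Size the canvas to hold both eyes side by side at the display's recommended per-eye render size.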
+ webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
+ webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
+ } else {
+ webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
+ webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
+ }
+ }
+
+ // Get a matrix for the pose that takes into account the stageParameters
+ // if we have them, and otherwise adjusts the position to ensure we're
+ // not stuck in the floor.
+ function getStandingViewMatrix (out, view) {
+ if (vrDisplay.stageParameters) {
+ // If the headset provides stageParameters use the
+ // sittingToStandingTransform to transform the view matrix into a
+ // space where the floor in the center of the user's play space is the
+ // origin.
+ mat4.invert(out, vrDisplay.stageParameters.sittingToStandingTransform);
+ mat4.multiply(out, view, out);
+ } else {
+ // Otherwise you'll want to translate the view to compensate for the
+ // scene floor being at Y=0. Ideally this should match the user's
+ // height (you may want to make it configurable). For this demo we'll
+ // just assume all human beings are 1.65 meters (~5.4ft) tall.
+ mat4.identity(out);
+ mat4.translate(out, out, [0, PLAYER_HEIGHT, 0]);
+ mat4.invert(out, out);
+ mat4.multiply(out, view, out);
+ }
+ }
+
+ function renderSceneView (projection, view, pose) {
+ cubeIsland.render(projection, view, stats);
+
+ // For fun, draw a blue cube where the player's head would have been if
+ // we weren't taking the stageParameters into account. It'll start in
+ // the center of the floor.
+ var orientation = pose.orientation;
+ var position = pose.position;
+ if (!orientation) { orientation = [0, 0, 0, 1]; }
+ if (!position) { position = [0, 0, 0]; }
+ debugGeom.bind(projection, view);
+ debugGeom.drawCube(orientation, position, 0.2, [0, 0, 1, 1]);
+ }
+
+ function onAnimationFrame (t) {
+ stats.begin();
+
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+
+ if (vrDisplay) {
+ vrDisplay.requestAnimationFrame(onAnimationFrame);
+
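+ // getFrameData fills frameData with the current pose and the view/projection matrices for both eyes.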
+ vrDisplay.getFrameData(frameData);
+
+ if (vrDisplay.isPresenting) {
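+ // Render a stereo view: left eye on the left half of the canvas, right eye on the right half.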
+ gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
+ renderSceneView(frameData.leftProjectionMatrix, viewMat, frameData.pose);
+
+ gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ getStandingViewMatrix(viewMat, frameData.rightViewMatrix);
+ renderSceneView(frameData.rightProjectionMatrix, viewMat, frameData.pose);
+
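+ // Tell the VRDisplay that rendering is done so the frame can be presented on the headset.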
+ vrDisplay.submitFrame();
+ } else {
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ getStandingViewMatrix(viewMat, frameData.leftViewMatrix);
+ renderSceneView(projectionMat, viewMat, frameData.pose);
+ stats.renderOrtho();
+ }
+ } else {
+ window.requestAnimationFrame(onAnimationFrame);
+
+ // No VRDisplay found.
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ mat4.identity(viewMat);
+ mat4.translate(viewMat, viewMat, [0, -PLAYER_HEIGHT, 0]);
+ cubeIsland.render(projectionMat, viewMat, stats);
+
+ stats.renderOrtho();
+ }
+
+ stats.end();
+ }
+ })();
+ </script>
+ </body>
+</html>
diff --git a/tests/html/webvr/simple-mirroring.html b/tests/html/webvr/simple-mirroring.html
new file mode 100644
index 00000000000..e362e5184ff
--- /dev/null
+++ b/tests/html/webvr/simple-mirroring.html
@@ -0,0 +1,262 @@
+<!doctype html>
+<!--
+Copyright 2016 The Chromium Authors. All rights reserved.
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+-->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
+ <meta name="mobile-web-app-capable" content="yes">
+ <meta name="apple-mobile-web-app-capable" content="yes">
+
+ <title>04 - Simple Mirroring</title>
+
+ <!--
+ This sample demonstrates how to mirror content to an external display
+ while presenting to a VRDisplay.
+ -->
+
+ <style>
+ #webgl-canvas {
+ box-sizing: border-box;
+ height: 100%;
+ left: 0;
+ margin: 0;
+ position: absolute;
+ top: 0;
+ width: 100%;
+ }
+ </style>
+
+ <!-- This entire block is only to facilitate dynamically enabling and
+ disabling the WebVR polyfill, and is not necessary for most WebVR apps.
+ If you want to use the polyfill in your app, just include the js file and
+ everything will work the way you want it to by default. -->
+ <script>
+ var WebVRConfig = {
+ // Prevents the polyfill from initializing automatically.
+ DEFER_INITIALIZATION: true,
+ // Polyfill optimizations
+ DIRTY_SUBMIT_FRAME_BINDINGS: true,
+ BUFFER_SCALE: 0.75,
+ };
+ </script>
+ <script src="js/third-party/webvr-polyfill.js"></script>
+ <script src="js/third-party/wglu/wglu-url.js"></script>
+ <script>
+ // Dynamically turn the polyfill on if requested by the query args.
+ if (WGLUUrl.getBool('polyfill', false)) {
+ InitializeWebVRPolyfill();
+ } else {
+ // Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
+ InitializeSpecShim();
+ }
+ </script>
+ <!-- End sample polyfill enabling logic -->
+
+ <script src="js/third-party/gl-matrix-min.js"></script>
+
+ <script src="js/third-party/wglu/wglu-program.js"></script>
+ <script src="js/third-party/wglu/wglu-stats.js"></script>
+ <script src="js/third-party/wglu/wglu-texture.js"></script>
+
+ <script src="js/vr-cube-sea.js"></script>
+ <script src="js/vr-samples-util.js"></script>
+ </head>
+ <body>
+ <canvas id="webgl-canvas"></canvas>
+ <script>
+ /* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
+ (function () {
+ "use strict";
+
+ var vrDisplay = null;
+ var frameData = null;
+ var projectionMat = mat4.create();
+ var viewMat = mat4.create();
+ var vrPresentButton = null;
+
+ // ================================
+ // WebVR-specific code begins here.
+ // ================================
+
+ // WebGL setup.
+ var webglCanvas = document.getElementById("webgl-canvas");
+ var gl = null;
+ var cubeSea = null;
+ var stats = null;
+
+ function initWebGL (preserveDrawingBuffer) {
+ // Setting preserveDrawingBuffer to true prevents the canvas from being
+ // implicitly cleared when calling submitFrame or compositing the canvas
+ // on the document. For the simplest form of mirroring we want to create
+ // the canvas with that option enabled. Note that this may incur a
+ // performance penalty, as it may imply that additional copies of the
+ // canvas backbuffer need to be made. As a result, we ONLY want to set
+ // that if we know the VRDisplay has an external display, which is why
+ // we defer WebGL initialization until after we've gotten results back
+ // from navigator.getDisplays and know which device we'll be
+ // presenting with.
+ var glAttribs = {
+ alpha: false,
+ antialias: false, //!VRSamplesUtil.isMobile(),
+ preserveDrawingBuffer: false //preserveDrawingBuffer
+ };
+ gl = webglCanvas.getContext("webgl", glAttribs);
+ if (!gl) {
+ gl = webglCanvas.getContext("experimental-webgl", glAttribs);
+ if (!gl) {
+ VRSamplesUtil.addError("Your browser does not support WebGL.");
+ return;
+ }
+ }
+ gl.clearColor(0.1, 0.2, 0.3, 1.0);
+ gl.enable(gl.DEPTH_TEST);
+ gl.enable(gl.CULL_FACE);
+
+ var textureLoader = new WGLUTextureLoader(gl);
+ var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
+ cubeSea = new VRCubeSea(gl, texture);
+ stats = new WGLUStats(gl);
+
+ // Wait until we have a WebGL context to resize and start rendering.
+ window.addEventListener("resize", onResize, false);
+ onResize();
+ window.requestAnimationFrame(onAnimationFrame);
+ }
+
+ function onVRRequestPresent () {
+ vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
+ }, function () {
+ VRSamplesUtil.addError("requestPresent failed.", 2000);
+ });
+ }
+
+ function onVRExitPresent () {
+ if (!vrDisplay.isPresenting)
+ return;
+
+ vrDisplay.exitPresent().then(function () {
+ }, function () {
+ VRSamplesUtil.addError("exitPresent failed.", 2000);
+ });
+ }
+
+ function onVRPresentChange () {
+ onResize();
+
+ if (vrDisplay.isPresenting) {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
+ }
+ } else {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+ }
+ }
+ }
+
+ if (navigator.vr) {
+ frameData = new VRFrameData();
+
+ navigator.vr.getDisplays().then(function (displays) {
+ if (displays.length > 0) {
+ vrDisplay = displays[0];
+ vrDisplay.depthNear = 0.1;
+ vrDisplay.depthFar = 1024.0;
+
+ VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
+
+ if (vrDisplay.capabilities.canPresent)
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+
+ vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
+ //vrDisplay.addEventListener('activate', onVRRequestPresent, false);
+ //vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
+
+ // Only use preserveDrawingBuffer if we have an external display to
+ // mirror to.
+ initWebGL(vrDisplay.capabilities.hasExternalDisplay);
+ } else {
+ initWebGL(false);
+ VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
+ }
+ });
+ } else if (navigator.getVRDevices) {
+ initWebGL(false);
+ VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
+ } else {
+ // No VR means no mirroring, so create WebGL content without
+ // preserveDrawingBuffer
+ initWebGL(false);
+ VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
+ }
+
+ function onResize () {
+ if (vrDisplay && vrDisplay.isPresenting) {
+ // If we're presenting we want to use the drawing buffer size
+ // recommended by the VRDevice, since that will ensure the best
+ // results post-distortion.
+ var leftEye = vrDisplay.getEyeParameters("left");
+ var rightEye = vrDisplay.getEyeParameters("right");
+
+ // For simplicity we're going to render both eyes at the same size,
+ // even if one eye needs less resolution. You can render each eye at
+ // the exact size it needs, but you'll need to adjust the viewports to
+ // account for that.
+ webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
+ webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
+ } else {
+ // We only want to change the size of the canvas drawing buffer to
+ // match the window dimensions when we're not presenting.
+ webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
+ webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
+ }
+ }
+
+ function onAnimationFrame (t) {
+ stats.begin();
+
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+
+ if (vrDisplay) {
+ vrDisplay.requestAnimationFrame(onAnimationFrame);
+
+ vrDisplay.getFrameData(frameData);
+
+ if (vrDisplay.isPresenting) {
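+ // Render a stereo view: each half of the canvas uses that eye's projection and view matrix straight from frameData.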
+ gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
+
+ gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
+
+ vrDisplay.submitFrame();
+ } else {
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
+ stats.renderOrtho();
+ }
+ } else {
+ window.requestAnimationFrame(onAnimationFrame);
+
+ // No VRDisplay found.
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ mat4.identity(viewMat);
+ cubeSea.render(projectionMat, viewMat, stats);
+
+ stats.renderOrtho();
+ }
+
+ stats.end();
+ }
+ })();
+ </script>
+ </body>
+</html>
diff --git a/tests/html/webvr/vr-presentation.html b/tests/html/webvr/vr-presentation.html
new file mode 100644
index 00000000000..3ecb848fab4
--- /dev/null
+++ b/tests/html/webvr/vr-presentation.html
@@ -0,0 +1,307 @@
+<!doctype html>
+<!--
+Copyright 2016 The Chromium Authors. All rights reserved.
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+-->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
+ <meta name="mobile-web-app-capable" content="yes">
+ <meta name="apple-mobile-web-app-capable" content="yes">
+
+ <title>03 - VR Presentation</title>
+
+ <!--
+ This sample demonstrates how to present the contents of a WebGL canvas to
+ a VRDisplay. The content is not mirrored on the main display while being
+ presented.
+ -->
+
+ <style>
+ #webgl-canvas, #presenting-message {
+ box-sizing: border-box;
+ height: 100%;
+ left: 0;
+ margin: 0;
+ position: absolute;
+ top: 0;
+ width: 100%;
+ }
+ #presenting-message {
+ color: white;
+ font-family: sans-serif;
+ font-size: 2em;
+ font-weight: bold;
+ z-index: 1;
+ text-align: center;
+ padding: 0.5em;
+ background-color: #444;
+ display: none;
+ }
+ </style>
+
+ <!-- This entire block is only to facilitate dynamically enabling and
+ disabling the WebVR polyfill, and is not necessary for most WebVR apps.
+ If you want to use the polyfill in your app, just include the js file and
+ everything will work the way you want it to by default. -->
+ <script>
+ var WebVRConfig = {
+ // Prevents the polyfill from initializing automatically.
+ DEFER_INITIALIZATION: true,
+ // Polyfill optimizations
+ DIRTY_SUBMIT_FRAME_BINDINGS: true,
+ BUFFER_SCALE: 0.75,
+ };
+ </script>
+ <script src="js/third-party/webvr-polyfill.js"></script>
+ <script src="js/third-party/wglu/wglu-url.js"></script>
+ <script>
+ // Dynamically turn the polyfill on if requested by the query args.
+ if (WGLUUrl.getBool('polyfill', false)) {
+ InitializeWebVRPolyfill();
+ } else {
+ // Shim for migration from an older version of WebVR. Shouldn't be necessary for very long.
+ InitializeSpecShim();
+ }
+ </script>
+ <!-- End sample polyfill enabling logic -->
+
+ <script src="js/third-party/gl-matrix-min.js"></script>
+
+ <script src="js/third-party/wglu/wglu-program.js"></script>
+ <script src="js/third-party/wglu/wglu-stats.js"></script>
+ <script src="js/third-party/wglu/wglu-texture.js"></script>
+
+ <script src="js/vr-cube-sea.js"></script>
+ <script src="js/vr-samples-util.js"></script>
+ </head>
+ <body>
+ <canvas id="webgl-canvas"></canvas>
+ <div id="presenting-message">Put on your headset now</div>
+ <script>
+ /* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
+ (function () {
+ "use strict";
+
+ var vrDisplay = null;
+ var frameData = null;
+ var projectionMat = mat4.create();
+ var viewMat = mat4.create();
+
+ var vrPresentButton = null;
+
+ // ===================================================
+ // WebGL scene setup. This code is not WebVR specific.
+ // ===================================================
+
+ // WebGL setup.
+ var webglCanvas = document.getElementById("webgl-canvas");
+ var glAttribs = {
+ alpha: false,
+ antialias: false //!VRSamplesUtil.isMobile()
+ };
+ var gl = webglCanvas.getContext("webgl", glAttribs);
+ if (!gl) {
+ gl = webglCanvas.getContext("experimental-webgl", glAttribs);
+ if (!gl) {
+ VRSamplesUtil.addError("Your browser does not support WebGL.");
+ return;
+ }
+ }
+ gl.clearColor(0.1, 0.2, 0.3, 1.0);
+ gl.enable(gl.DEPTH_TEST);
+ gl.enable(gl.CULL_FACE);
+
+ var textureLoader = new WGLUTextureLoader(gl);
+ var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
+ var cubeSea = new VRCubeSea(gl, texture);
+
+ var stats = new WGLUStats(gl);
+
+ var presentingMessage = document.getElementById("presenting-message");
+
+ // ================================
+ // WebVR-specific code begins here.
+ // ================================
+
+ function onVRRequestPresent () {
+ // This can only be called in response to a user gesture.
+ vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
+ onVRPresentChange();
+ // The canvas resize and UI updates are handled in onVRPresentChange, called above.
+ }, function () {
+ VRSamplesUtil.addError("requestPresent failed.", 2000);
+ });
+ }
+
+ function onVRExitPresent () {
+ // No sense in exiting presentation if we're not actually presenting.
+ // (This may happen if we get an event like vrdisplaydeactivate when
+ // we weren't presenting.)
+ if (!vrDisplay.isPresenting)
+ return;
+
+ vrDisplay.exitPresent().then(function () {
+ // Nothing to do because we're handling things in onVRPresentChange.
+ }, function () {
+ VRSamplesUtil.addError("exitPresent failed.", 2000);
+ });
+ }
+
+ function onVRPresentChange () {
+ // When we begin or end presenting, the canvas should be resized to the
+ // recommended dimensions for the display.
+ onResize();
+
+ if (vrDisplay.isPresenting) {
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ // Because we're not mirroring, the canvas shown on an external screen
+ // will freeze while presenting. It's better to cover it with a message
+ // indicating that content is being shown on the VRDisplay.
+ presentingMessage.style.display = "block";
+
+ // On devices with an external display the UA may not provide a way
+ // to exit VR presentation mode, so we should provide one ourselves.
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
+ }
+ } else {
+ // If we have an external display take down the presenting message and
+ // change the button back to "Enter VR".
+ if (vrDisplay.capabilities.hasExternalDisplay) {
+ presentingMessage.style.display = "";
+
+ VRSamplesUtil.removeButton(vrPresentButton);
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+ }
+ }
+ }
+
+ if (navigator.vr) {
+ frameData = new VRFrameData();
+
+ navigator.vr.getDisplays().then(function (displays) {
+ if (displays.length > 0) {
+ vrDisplay = displays[0];
+
+ // It's highly recommended that you set the near and far planes to
+ // something appropriate for your scene so the projection matrices
+ // WebVR produces have a well-scaled depth buffer.
+ vrDisplay.depthNear = 0.1;
+ vrDisplay.depthFar = 1024.0;
+
+ VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
+
+ // Generally, you want to wait until VR support is confirmed and
+ // you know the user has a VRDisplay capable of presenting connected
+ // before adding UI that advertises VR features.
+ if (vrDisplay.capabilities.canPresent)
+ vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
+
+ // The UA may kick us out of VR present mode for any reason, so to
+ // ensure we always know when we begin/end presenting we need to
+ // listen for presentchange events.
+ vrDisplay.addEventListener('presentchange', onVRPresentChange, false);
+
+ // These events fire when the user agent has had some indication that
+ // it would be appropriate to enter or exit VR presentation mode, such
+ // as the user putting on a headset and triggering a proximity sensor.
+ // You can inspect the `reason` property of the event to learn why the
+ // event was fired, but in this case we're going to always trust the
+ // event and enter or exit VR presentation mode when asked.
+ //vrDisplay.addEventListener('activate', onVRRequestPresent, false);
+ //vrDisplay.addEventListener('deactivate', onVRExitPresent, false);
+ } else {
+ VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
+ }
+ });
+ } else if (navigator.getVRDevices) {
+ VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
+ } else {
+ VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
+ }
+
+ function onResize () {
+ if (vrDisplay && vrDisplay.isPresenting) {
+ // If we're presenting we want to use the drawing buffer size
+ // recommended by the VRDevice, since that will ensure the best
+ // results post-distortion.
+ var leftEye = vrDisplay.getEyeParameters("left");
+ var rightEye = vrDisplay.getEyeParameters("right");
+
+ // For simplicity we're going to render both eyes at the same size,
+ // even if one eye needs less resolution. You can render each eye at
+ // the exact size it needs, but you'll need to adjust the viewports to
+ // account for that.
+ webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
+ webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
+ } else {
+ // We only want to change the size of the canvas drawing buffer to
+ // match the window dimensions when we're not presenting.
+ webglCanvas.width = window.innerWidth * window.devicePixelRatio * 2;
+ webglCanvas.height = window.innerHeight * window.devicePixelRatio * 2;
+ }
+ }
+ window.addEventListener("resize", onResize, false);
+ onResize();
+
+ function onAnimationFrame (t) {
+ stats.begin();
+
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+
+ if (vrDisplay) {
+ // When presenting content to the VRDisplay we want to update at its
+ // refresh rate if it differs from the refresh rate of the main
+ // display. Calling VRDisplay.requestAnimationFrame ensures we render
+ // at the right speed for VR.
+ vrDisplay.requestAnimationFrame(onAnimationFrame);
+
+ // As a general rule you want to get the pose as late as possible
+ // and call VRDisplay.submitFrame as early as possible after
+ // retrieving the pose. Do any work for the frame that doesn't need
+ // to know the pose earlier to ensure the lowest latency possible.
+ //var pose = vrDisplay.getPose();
+ vrDisplay.getFrameData(frameData);
+
+ if (vrDisplay.isPresenting) {
+ // When presenting render a stereo view.
+ gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats);
+
+ gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
+ cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats);
+
+ // If we're currently presenting to the VRDisplay we need to
+ // explicitly indicate we're done rendering.
+ vrDisplay.submitFrame();
+ } else {
+ // When not presenting render a mono view that still takes pose into
+ // account.
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ // It's best to use our own projection matrix in this case, but the left eye's view matrix still provides head tracking.
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ cubeSea.render(projectionMat, frameData.leftViewMatrix, stats);
+ stats.renderOrtho();
+ }
+ } else {
+ window.requestAnimationFrame(onAnimationFrame);
+
+ // No VRDisplay found.
+ gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
+ mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
+ mat4.identity(viewMat);
+ cubeSea.render(projectionMat, viewMat, stats);
+
+ stats.renderOrtho();
+ }
+
+ stats.end();
+ }
+ window.requestAnimationFrame(onAnimationFrame);
+ })();
+ </script>
+ </body>
+</html>