From 9dd9aaacdf8ed819e14ee2f7aa00e2b50ba6d220 Mon Sep 17 00:00:00 2001
From: Brandon Jones
Date: Wed, 15 Feb 2017 12:05:04 -0800
Subject: [PATCH] Several explainer updates

* Update FrameOfReference/CoordinateSystem based on updates from #178
* Rename VRFrameData to VRDisplayPose to avoid name conflicts w/ 1.1
* Made a few functions return promises
* A few typo fixes
---
 explainer.md | 43 +++++++++++++++++++++++++++++--------------
 1 file changed, 29 insertions(+), 14 deletions(-)

diff --git a/explainer.md b/explainer.md
index 6e7b83c5..afd06d1b 100644
--- a/explainer.md
+++ b/explainer.md
@@ -28,7 +28,7 @@ Also, while input is an important part of the full VR experience it's a large en
 Given the marketing of early VR hardware to gamers, one may naturally assume that this API will primarily be used for development of games. While that’s certainly something we expect to see given the history of the closely related WebGL API, we’ll probably see far more “long tail”-style content than large-scale games. Broadly, VR content on the web will likely cover areas that do not cleanly fit into the app-store models being used as the primary distribution methods by all the major VR hardware providers, or where the content itself is not permitted by the store guidelines. Some high level examples are:
 
 ### Video
-360° and 3D video are areas of immense interest (for example, see [ABC’s 360° video coverage of the upcoming US election](http://abcnews.go.com/US/fullpage/abc-news-vr-virtual-reality-news-stories-33768357)), and the web has proven massively effective at distributing video in the past. A VR-enabled video player would, upon detecting the presence of VR hardware, show a “View in VR” button, similar to the “Fullscreen” buttons present in today’s video players. When the user clicks that button, a video would render in the headset and respond to natural head movement. Traditional 2D video could also be presented in the headset as though the user is sitting in front of a theater-sized screen, providing a more immersive experience.
+360° and 3D video are areas of immense interest (for example, see [ABC’s 360° video coverage](http://abcnews.go.com/US/fullpage/abc-news-vr-virtual-reality-news-stories-33768357)), and the web has proven massively effective at distributing video in the past. A VR-enabled video player would, upon detecting the presence of VR hardware, show a “View in VR” button, similar to the “Fullscreen” buttons present in today’s video players. When the user clicks that button, a video would render in the headset and respond to natural head movement. Traditional 2D video could also be presented in the headset as though the user is sitting in front of a theater-sized screen, providing a more immersive experience.
 
 ### Object/data visualization
 Sites can provide easy 3D visualizations through WebVR, often as a progressive improvement to their more traditional rendering. Viewing 3D models (e.g., [SketchFab](https://sketchfab.com/)), architectural previsualizations, medical imaging, mapping, and [basic data visualization](http://graphics.wsj.com/3d-nasdaq/) can all be more impactful, easier to understand, and convey an accurate sense of scale in VR. For those use cases, few users would consider a native app worth installing, especially when web content is simply a link or a click away.
@@ -98,7 +98,7 @@ async function OnVRAvailable() {
 
 ### Beginning a VR session
 
-Clicking that button will attempt to initiate a [`VRSession`](https://w3c.github.io/webvr/#interface-vrsession), which manages input and output for the display. When creating a session with `VRDisplay.requestSession` the capabilities that the returned session must have are passed in via a dictionary, exactly like the `supportsSession` call. If `supportsSession` returned true for a given dictionary then calling `requestingSession` with the same dictionary values should be reasonably expected to succeed, barring external factors (such as `requestSession` not being called in a user gesture or another page currently having an active session for the same display.)
+Clicking that button will attempt to initiate a [`VRSession`](https://w3c.github.io/webvr/#interface-vrsession), which manages input and output for the display. When creating a session with `VRDisplay.requestSession` the capabilities that the returned session must have are passed in via a dictionary, exactly like the `supportsSession` call. If `supportsSession` returned true for a given dictionary then calling `requestSession` with the same dictionary values should be reasonably expected to succeed, barring external factors (such as `requestSession` not being called in a user gesture or another page currently having an active session for the same display.)
 
 The content to present to the display is defined by a `VRLayer`. In the initial version of the spec only one layer type, `VRCanvasLayer`, is defined and only one layer can be used at a time. This is set via the `VRSession.baseLayer` attribute. (`baseLayer` because future versions of the spec will likely enable multiple layers, at which point this would act like the `firstChild` attribute of a DOM element.)
 
@@ -447,8 +447,9 @@ interface VRSession : EventTarget {
   VRSourceProperties getSourceProperties(optional float scale);
 
-  VRFrameOfReference? getFrameOfReference(VRFrameOfReferenceInit options);
-  VRFrameData? getFrameData(VRFrameOfReference frameOfReference);
+  Promise<VRFrameOfReference> createFrameOfReference(VRFrameOfReferenceType type);
+  VRDisplayPose? getDisplayPose(VRCoordinateSystem coordinateSystem);
+  Promise<VRPlayAreaBounds> getPlayAreaBounds(VRCoordinateSystem coordinateSystem);
 
   Promise<DOMHighResTimeStamp> commit();
   Promise<void> endSession();
 };
@@ -458,7 +459,7 @@ interface VRSession : EventTarget {
 // Pose
 //
 
-interface VRFrameData {
+interface VRDisplayPose {
   readonly attribute Float32Array leftProjectionMatrix;
   readonly attribute Float32Array leftViewMatrix;
 
@@ -482,24 +483,38 @@ interface VRCanvasLayer : VRLayer {
   attribute VRCanvasSource source;
 
   void setLeftBounds(float left, float bottom, float right, float top);
-  sequence<float> getLeftBounds();
+  FrozenArray<float> getLeftBounds();
 
   void setRightBounds(float left, float bottom, float right, float top);
-  sequence<float> getRightBounds();
+  FrozenArray<float> getRightBounds();
 };
 
 //
-// Frame of Reference
+// Coordinate Systems
 //
 
-dictionary VRFrameOfReferenceInit {
-  attribute boolean position = true;
-  attribute boolean floorRelative = false;
+interface VRCoordinateSystem {
+  Float32Array? getTransformTo(VRCoordinateSystem other);
 };
 
-interface VRFrameOfReference {
-  readonly attribute boolean position;
-  readonly attribute boolean floorRelative;
+enum VRFrameOfReferenceType {
+  "EyeLevel",
+  "FloorLevel",
+};
+
+interface VRFrameOfReference : VRCoordinateSystem {
+  readonly attribute VRFrameOfReferenceType type;
+};
+
+//
+// Play Area Bounds
+//
+
+interface VRPlayAreaBounds {
+  readonly attribute float minX;
+  readonly attribute float maxX;
+  readonly attribute float minZ;
+  readonly attribute float maxZ;
 };
 
 //
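
To make the reshaped API concrete, here is a minimal, non-normative sketch of a render loop as it might look after this patch. It assumes a `vrDisplay` and `glCanvas` obtained as in the explainer's earlier samples; the `{ exclusive: true }` dictionary, the `VRCanvasLayer` constructor shape, and `drawScene()` are illustrative stand-ins rather than settled API:

```js
// Non-normative sketch. `vrDisplay` and `glCanvas` are assumed to come from
// earlier explainer samples; `drawScene()` is a hypothetical app function.
let vrSession = null;
let frameOfRef = null;

vrDisplay.requestSession({ exclusive: true }).then(session => {
  vrSession = session;
  vrSession.baseLayer = new VRCanvasLayer(vrSession, glCanvas);

  // createFrameOfReference() now returns a promise rather than a
  // synchronously constructed VRFrameOfReference.
  return vrSession.createFrameOfReference("FloorLevel");
}).then(frame => {
  frameOfRef = frame;
  onDrawFrame();
});

function onDrawFrame() {
  // getDisplayPose() is the renamed getFrameData(); it accepts any
  // VRCoordinateSystem, including a VRFrameOfReference.
  let pose = vrSession.getDisplayPose(frameOfRef);
  if (pose) {
    drawScene(pose.leftProjectionMatrix, pose.leftViewMatrix);   // left eye
    drawScene(pose.rightProjectionMatrix, pose.rightViewMatrix); // right eye
  }
  // commit() resolves once the frame is submitted to the display, driving
  // the loop much like requestAnimationFrame does for 2D pages.
  vrSession.commit().then(onDrawFrame);
}
```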
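In the same spirit, a sketch of the new `VRCoordinateSystem` plumbing. `vrSession` is the session from the sketch above, and `drawBoundsIndicator()` is again a hypothetical application function:

```js
// Sketch: relate two frames of reference and query the play area bounds.
Promise.all([
  vrSession.createFrameOfReference("EyeLevel"),
  vrSession.createFrameOfReference("FloorLevel"),
]).then(([eyeLevel, floorLevel]) => {
  // getTransformTo() yields a Float32Array transform (presumably a 4x4
  // matrix), or null if the relationship between the two coordinate systems
  // isn't currently known. It could be used to re-root eye-level content
  // against the floor-level frame of reference.
  let eyeToFloor = eyeLevel.getTransformTo(floorLevel);

  // getPlayAreaBounds() now returns a promise for a VRPlayAreaBounds: an
  // axis-aligned rectangle on the floor plane of the given coordinate system.
  return vrSession.getPlayAreaBounds(floorLevel).then(bounds => {
    let width = bounds.maxX - bounds.minX;
    let depth = bounds.maxZ - bounds.minZ;
    drawBoundsIndicator(width, depth); // e.g. a chaperone-style outline
  });
});
```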