<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Audio With Camera Example — naf-janus-adapter</title>
<meta name="description" content="Audio With Camera Example — naf-janus-adapter" />
<script src="https://aframe.io/releases/1.6.0/aframe.min.js" crossorigin="anonymous"></script>
<script
src="https://unpkg.com/networked-aframe@^0.12.0/dist/networked-aframe.min.js"
crossorigin="anonymous"
></script>
<script src="https://webrtc.github.io/adapter/adapter-latest.js" crossorigin="anonymous"></script>
<script src="./dist/naf-janus-adapter.js"></script>
<script src="./js/audio-system.js"></script>
<script
src="https://cdn.jsdelivr.net/npm/aframe-randomizer-components/dist/aframe-randomizer-components.min.js"
crossorigin="anonymous"
></script>
<script
src="https://cdn.jsdelivr.net/npm/aframe-environment-component/dist/aframe-environment-component.min.js"
crossorigin="anonymous"
></script>
<script>
// see issue https://github.com/networked-aframe/networked-aframe/issues/267
NAF.schemas.getComponentsOriginal = NAF.schemas.getComponents;
NAF.schemas.getComponents = (template) => {
if (!NAF.schemas.hasTemplate("#avatar-template")) {
NAF.schemas.add({
template: "#avatar-template",
components: [
{
component: "position",
requiresNetworkUpdate: NAF.utils.vectorRequiresUpdate(0.001)
},
{
component: "rotation",
requiresNetworkUpdate: NAF.utils.vectorRequiresUpdate(0.5)
},
{
selector: ".head",
component: "material",
property: "color",
},
],
});
}
const components = NAF.schemas.getComponentsOriginal(template);
return components;
};
</script>
<link rel="stylesheet" type="text/css" href="./css/style.css" />
</head>
<body>
<a-scene
audio
networked-scene="
room: audiovideo;
debug: false;
adapter: janus;
connectOnLoad: true;
"
renderer="physicallyCorrectLights: true;"
>
<a-assets>
<!-- Templates -->
<!-- Avatar -->
<template id="avatar-template">
<a-entity class="avatar" networked-audio-source="positional: true">
<a-plane
color="#fff"
width="4"
height="3"
position="0 .6 0"
material="side: back"
networked-video-source
></a-plane>
<a-sphere class="head" scale="0.45 0.5 0.4"></a-sphere>
<a-entity class="face" position="0 0.05 0">
<a-sphere class="eye" color="#efefef" position="0.16 0.1 -0.35" scale="0.12 0.12 0.12">
<a-sphere class="pupil" color="#000" position="0 0 -1" scale="0.2 0.2 0.2"></a-sphere>
</a-sphere>
<a-sphere class="eye" color="#efefef" position="-0.16 0.1 -0.35" scale="0.12 0.12 0.12">
<a-sphere class="pupil" color="#000" position="0 0 -1" scale="0.2 0.2 0.2"></a-sphere>
</a-sphere>
</a-entity>
</a-entity>
</template>
<!-- /Templates -->
</a-assets>
<a-entity id="rig">
<a-entity
id="player"
networked="template:#avatar-template;attachTemplateToLocal:false;"
camera
position="0 1.6 0"
wasd-controls
look-controls
>
<a-sphere class="head" visible="false" random-color></a-sphere>
</a-entity>
</a-entity>
<a-entity environment="preset:starry;groundColor:#000000"></a-entity>
<a-entity light="type:ambient;intensity:0.5"></a-entity>
</a-scene>
<div class="actions">
<button id="mic-btn" type="button" class="button">Mute Mic</button>
<button id="camera-btn" type="button" class="button">Show Camera</button>
<button id="scene-btn" type="button" class="button">Start streaming scene</button>
</div>
<script>
function genClientId() {
return String(crypto.getRandomValues(new Uint32Array(1))[0]);
}
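// Alternative sketch (an assumption, not used below): a UUID v4 also works as a
// client id; modern browsers can generate one directly.
function genClientIdUuid() {
return crypto.randomUUID();
}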
const state = {};
state.micEnabled = true; // set it to false if you want to be muted initially.
state.currentStream = null;
state.cameraEnabled = false;
state.sceneStreamingEnabled = false;
// Prompt for audio.
document.addEventListener("DOMContentLoaded", () => {
const scene = document.querySelector("a-scene");
const micBtnEl = document.getElementById("mic-btn");
const cameraBtnEl = document.getElementById("camera-btn");
const sceneBtnEl = document.getElementById("scene-btn");
// Handle mic button click (Mute and Unmute)
micBtnEl.addEventListener("click", function () {
NAF.connection.adapter.enableMicrophone(!state.micEnabled);
state.micEnabled = !state.micEnabled;
micBtnEl.textContent = state.micEnabled ? "Mute Mic" : "Unmute Mic";
});
micBtnEl.textContent = state.micEnabled ? "Mute Mic" : "Unmute Mic";
const stopAndRemoveVideoTrack = async () => {
const videoTracks = state.currentStream.getVideoTracks();
if (videoTracks.length > 0) {
videoTracks[0].stop();
state.currentStream.removeTrack(videoTracks[0]);
await NAF.connection.adapter.setLocalMediaStream(state.currentStream);
}
};
// Handle camera button click (Show and Hide)
cameraBtnEl.addEventListener("click", async () => {
if (state.sceneStreamingEnabled) {
await stopAndRemoveVideoTrack();
state.sceneStreamingEnabled = false;
sceneBtnEl.textContent = "Start streaming scene";
}
if (state.cameraEnabled) {
await stopAndRemoveVideoTrack();
} else {
const stream = await navigator.mediaDevices.getUserMedia({
video: { mediaSource: "camera", width: { max: 1280, ideal: 640 }, height: { ideal: 360 }, frameRate: 30 },
});
state.currentStream.addTrack(stream.getVideoTracks()[0]);
await NAF.connection.adapter.setLocalMediaStream(state.currentStream);
}
state.cameraEnabled = !state.cameraEnabled;
cameraBtnEl.textContent = state.cameraEnabled ? "Hide Camera" : "Show Camera";
});
// Handle scene streaming button click (Start and Stop)
sceneBtnEl.addEventListener("click", async () => {
if (state.cameraEnabled) {
await stopAndRemoveVideoTrack();
state.cameraEnabled = false;
cameraBtnEl.textContent = "Show Camera";
}
if (state.sceneStreamingEnabled) {
await stopAndRemoveVideoTrack();
} else {
const stream = scene.canvas.captureStream(30);
state.currentStream.addTrack(stream.getVideoTracks()[0]);
await NAF.connection.adapter.setLocalMediaStream(state.currentStream);
}
state.sceneStreamingEnabled = !state.sceneStreamingEnabled;
sceneBtnEl.textContent = state.sceneStreamingEnabled ? "Stop streaming scene" : "Start streaming scene";
});
// TODO add example of tab screenshare with audio
// audioSystem.addStreamToOutboundAudio("screenshare", newStream);
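// A minimal sketch of that TODO (not wired to a button here, and it assumes the
// microphone setup below has already populated state.currentStream):
// getDisplayMedia can capture a tab together with its audio in browsers that allow
// audio capture, and that audio can be mixed into the outbound stream through the
// audio system, the same way the microphone is.
async function startTabShareWithAudio() {
const displayStream = await navigator.mediaDevices.getDisplayMedia({
video: { frameRate: 30 },
audio: true, // tab/system audio; only some browsers grant this
});
const audioTracks = displayStream.getAudioTracks();
if (audioTracks.length > 0) {
// Mix the captured audio into the outbound audio handled by ./js/audio-system.js
const audioSystem = scene.systems.audio;
audioSystem.addStreamToOutboundAudio("screenshare", new MediaStream(audioTracks));
}
// Publish the captured video track alongside the existing audio.
state.currentStream.addTrack(displayStream.getVideoTracks()[0]);
await NAF.connection.adapter.setLocalMediaStream(state.currentStream);
}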
scene.addEventListener("adapter-ready", ({ detail: adapter }) => {
// We don't use the syncOccupants API; instead, keep requestedOccupants pointing at the same array instance as availableOccupants.
adapter.requestedOccupants = adapter.availableOccupants;
const clientId = genClientId(); // generates a random numeric string; a UUID v4 also works, for example (see the genClientIdUuid sketch above)
adapter.setClientId(clientId);
navigator.mediaDevices
.getUserMedia({ audio: { echoCancellation: true, noiseSuppression: true, autoGainControl: true }})
.then((stream) => {
const audioSystem = scene.systems.audio;
audioSystem.addStreamToOutboundAudio("microphone", stream);
state.currentStream = audioSystem.outboundStream;
adapter.setLocalMediaStream(audioSystem.outboundStream).then(() => {
// Note that networked-scene audio:true option has no effect with the janus adapter
adapter.enableMicrophone(state.micEnabled);
});
})
.catch((err) => {
console.warn("Microphone access not allowed. This client will not broadcast audio.");
});
});
});
// Called by Networked-Aframe when connected to server
function onConnect() {
console.log("onConnect", new Date());
}
</script>
</body>
</html>