/*
 * Copyright (C) 2011 Google Inc. All rights reserved.
 * Copyright (C) 2011, 2015 Ericsson AB. All rights reserved.
 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
 * Copyright (C) 2013 Nokia Corporation and/or its subsidiary(-ies).
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "MediaStreamTrack.h"

#if ENABLE(MEDIA_STREAM)

#include "Document.h"
#include "Event.h"
#include "EventNames.h"
#include "JSOverconstrainedError.h"
#include "MediaConstraints.h"
#include "MediaStream.h"
#include "MediaStreamPrivate.h"
#include "NotImplemented.h"
#include "OverconstrainedError.h"
#include "Page.h"
#include "RealtimeMediaSourceCenter.h"
#include "ScriptExecutionContext.h"
#include <wtf/NeverDestroyed.h>

namespace WebCore {

Ref<MediaStreamTrack> MediaStreamTrack::create(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
{
    return adoptRef(*new MediaStreamTrack(context, WTFMove(privateTrack)));
}

MediaStreamTrack::MediaStreamTrack(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
    : ActiveDOMObject(&context)
    , m_private(WTFMove(privateTrack))
    , m_taskQueue(context)
{
    suspendIfNeeded();

    m_private->addObserver(*this);

    if (auto document = this->document())
        document->addAudioProducer(*this);
}

MediaStreamTrack::~MediaStreamTrack()
{
    m_private->removeObserver(*this);

    if (auto document = this->document())
        document->removeAudioProducer(*this);
}

const AtomicString& MediaStreamTrack::kind() const
{
    static NeverDestroyed<AtomicString> audioKind("audio", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<AtomicString> videoKind("video", AtomicString::ConstructFromLiteral);

    if (m_private->type() == RealtimeMediaSource::Type::Audio)
        return audioKind;
    return videoKind;
}

const String& MediaStreamTrack::id() const
{
    return m_private->id();
}

const String& MediaStreamTrack::label() const
{
    return m_private->label();
}

const AtomicString& MediaStreamTrack::contentHint() const
{
    static NeverDestroyed<const AtomicString> speechHint("speech", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> musicHint("music", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> detailHint("detail", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> textHint("text", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> motionHint("motion", AtomicString::ConstructFromLiteral);

    switch (m_private->contentHint()) {
    case MediaStreamTrackPrivate::HintValue::Empty:
        return emptyAtom();
    case MediaStreamTrackPrivate::HintValue::Speech:
        return speechHint;
    case MediaStreamTrackPrivate::HintValue::Music:
        return musicHint;
    case MediaStreamTrackPrivate::HintValue::Motion:
        return motionHint;
    case MediaStreamTrackPrivate::HintValue::Detail:
        return detailHint;
    case MediaStreamTrackPrivate::HintValue::Text:
        return textHint;
    default:
        return emptyAtom();
    }
}

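// Content hints are constrained by the track kind: an audio track accepts "", "speech" or "music";
// a video track accepts "", "motion", "detail" or "text". Any other value is ignored.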
void MediaStreamTrack::setContentHint(const String& hintValue)
{
    MediaStreamTrackPrivate::HintValue value;
    if (m_private->type() == RealtimeMediaSource::Type::Audio) {
        if (hintValue == "")
            value = MediaStreamTrackPrivate::HintValue::Empty;
        else if (hintValue == "speech")
            value = MediaStreamTrackPrivate::HintValue::Speech;
        else if (hintValue == "music")
            value = MediaStreamTrackPrivate::HintValue::Music;
        else
            return;
    } else {
        if (hintValue == "")
            value = MediaStreamTrackPrivate::HintValue::Empty;
        else if (hintValue == "detail")
            value = MediaStreamTrackPrivate::HintValue::Detail;
        else if (hintValue == "motion")
            value = MediaStreamTrackPrivate::HintValue::Motion;
        else if (hintValue == "text")
            value = MediaStreamTrackPrivate::HintValue::Text;
        else
            return;
    }
    m_private->setContentHint(value);
}

bool MediaStreamTrack::enabled() const
{
    return m_private->enabled();
}

void MediaStreamTrack::setEnabled(bool enabled)
{
    m_private->setEnabled(enabled);
}

bool MediaStreamTrack::muted() const
{
    return m_private->muted();
}

auto MediaStreamTrack::readyState() const -> State
{
    return ended() ? State::Ended : State::Live;
}

bool MediaStreamTrack::ended() const
{
    return m_ended || m_private->ended();
}

RefPtr<MediaStreamTrack> MediaStreamTrack::clone()
{
    if (!scriptExecutionContext())
        return nullptr;

    return MediaStreamTrack::create(*scriptExecutionContext(), m_private->clone());
}

void MediaStreamTrack::stopTrack(StopMode mode)
{
    // NOTE: this method is called when the "stop" method is called from JS, using the "ImplementedAs" IDL attribute.
    // This is done because ActiveDOMObject requires a "stop" method.

    if (ended())
        return;

    // An 'ended' event is not posted if m_ended is true when trackEnded is called, so set it now if we are
    // not supposed to post the event.
    if (mode == StopMode::Silently)
        m_ended = true;

    m_private->endTrack();
    m_ended = true;

    configureTrackRendering();
}

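// Only members the source reports as supported are copied into the MediaTrackSettings dictionary;
// everything else is left unset.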
MediaStreamTrack::TrackSettings MediaStreamTrack::getSettings() const
{
    auto& settings = m_private->settings();
    TrackSettings result;
    if (settings.supportsWidth())
        result.width = settings.width();
    if (settings.supportsHeight())
        result.height = settings.height();
    if (settings.supportsAspectRatio() && settings.aspectRatio()) // FIXME: Why the check for zero here?
        result.aspectRatio = settings.aspectRatio();
    if (settings.supportsFrameRate())
        result.frameRate = settings.frameRate();
    if (settings.supportsFacingMode())
        result.facingMode = RealtimeMediaSourceSettings::facingMode(settings.facingMode());
    if (settings.supportsVolume())
        result.volume = settings.volume();
    if (settings.supportsSampleRate())
        result.sampleRate = settings.sampleRate();
    if (settings.supportsSampleSize())
        result.sampleSize = settings.sampleSize();
    if (settings.supportsEchoCancellation())
        result.echoCancellation = settings.echoCancellation();
    if (settings.supportsDeviceId())
        result.deviceId = settings.deviceId();
    if (settings.supportsGroupId())
        result.groupId = settings.groupId();

    // FIXME: shouldn't this include displaySurface and logicalSurface?

    return result;
}

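// The helpers below convert a source capability into the min/max range dictionaries exposed by
// getCapabilities(). A single value collapses into a degenerate range where min equals max.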
static DoubleRange capabilityDoubleRange(const CapabilityValueOrRange& value)
{
    DoubleRange range;
    switch (value.type()) {
    case CapabilityValueOrRange::Double:
        range.min = value.value().asDouble;
        range.max = range.min;
        break;
    case CapabilityValueOrRange::DoubleRange:
        range.min = value.rangeMin().asDouble;
        range.max = value.rangeMax().asDouble;
        break;
    case CapabilityValueOrRange::Undefined:
    case CapabilityValueOrRange::ULong:
    case CapabilityValueOrRange::ULongRange:
        ASSERT_NOT_REACHED();
    }
    return range;
}

static LongRange capabilityIntRange(const CapabilityValueOrRange& value)
{
    LongRange range;
    switch (value.type()) {
    case CapabilityValueOrRange::ULong:
        range.min = value.value().asInt;
        range.max = range.min;
        break;
    case CapabilityValueOrRange::ULongRange:
        range.min = value.rangeMin().asInt;
        range.max = value.rangeMax().asInt;
        break;
    case CapabilityValueOrRange::Undefined:
    case CapabilityValueOrRange::Double:
    case CapabilityValueOrRange::DoubleRange:
        ASSERT_NOT_REACHED();
    }
    return range;
}

static Vector<String> capabilityStringVector(const Vector<RealtimeMediaSourceSettings::VideoFacingMode>& modes)
{
    Vector<String> result;
    result.reserveCapacity(modes.size());
    for (auto& mode : modes)
        result.uncheckedAppend(RealtimeMediaSourceSettings::facingMode(mode));
    return result;
}

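// Echo cancellation is reported as a vector of booleans: the first entry is always true, the second
// reflects whether the source allows the setting to be changed (read-write).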
static Vector<bool> capabilityBooleanVector(RealtimeMediaSourceCapabilities::EchoCancellation cancellation)
{
    Vector<bool> result;
    result.reserveCapacity(2);
    result.uncheckedAppend(true);
    result.uncheckedAppend(cancellation == RealtimeMediaSourceCapabilities::EchoCancellation::ReadWrite);
    return result;
}

MediaStreamTrack::TrackCapabilities MediaStreamTrack::getCapabilities() const
{
    auto capabilities = m_private->capabilities();
    TrackCapabilities result;
    if (capabilities.supportsWidth())
        result.width = capabilityIntRange(capabilities.width());
    if (capabilities.supportsHeight())
        result.height = capabilityIntRange(capabilities.height());
    if (capabilities.supportsAspectRatio())
        result.aspectRatio = capabilityDoubleRange(capabilities.aspectRatio());
    if (capabilities.supportsFrameRate())
        result.frameRate = capabilityDoubleRange(capabilities.frameRate());
    if (capabilities.supportsFacingMode())
        result.facingMode = capabilityStringVector(capabilities.facingMode());
    if (capabilities.supportsVolume())
        result.volume = capabilityDoubleRange(capabilities.volume());
    if (capabilities.supportsSampleRate())
        result.sampleRate = capabilityIntRange(capabilities.sampleRate());
    if (capabilities.supportsSampleSize())
        result.sampleSize = capabilityIntRange(capabilities.sampleSize());
    if (capabilities.supportsEchoCancellation())
        result.echoCancellation = capabilityBooleanVector(capabilities.echoCancellation());
    if (capabilities.supportsDeviceId())
        result.deviceId = capabilities.deviceId();
    if (capabilities.supportsGroupId())
        result.groupId = capabilities.groupId();
    return result;
}

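// When the caller passes no dictionary, an empty but valid MediaConstraints is built so that
// applyConstraints() still runs and the stored constraints are reset.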
static MediaConstraints createMediaConstraints(const Optional<MediaTrackConstraints>& constraints)
{
    if (!constraints) {
        MediaConstraints validConstraints;
        validConstraints.isValid = true;
        return validConstraints;
    }
    return createMediaConstraints(constraints.value());
}

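// applyConstraints() is asynchronous: the promise is stashed, the constraints are handed to the
// private track, and the completion handler either rejects with an OverconstrainedError or
// resolves and records the dictionary in m_constraints.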
void MediaStreamTrack::applyConstraints(const Optional<MediaTrackConstraints>& constraints, DOMPromiseDeferred<void>&& promise)
{
    m_promise = WTFMove(promise);

    auto completionHandler = [this, weakThis = makeWeakPtr(*this), constraints](auto&& error) mutable {
        if (!weakThis || !m_promise)
            return;
        if (error) {
            m_promise->rejectType<IDLInterface<OverconstrainedError>>(OverconstrainedError::create(WTFMove(error->badConstraint), WTFMove(error->message)));
            return;
        }
        m_promise->resolve();
        m_constraints = constraints.valueOr(MediaTrackConstraints { });
    };
    m_private->applyConstraints(createMediaConstraints(constraints), WTFMove(completionHandler));
}

void MediaStreamTrack::addObserver(Observer& observer)
{
    m_observers.append(&observer);
}

void MediaStreamTrack::removeObserver(Observer& observer)
{
    m_observers.removeFirst(&observer);
}

void MediaStreamTrack::pageMutedStateDidChange()
{
    if (m_ended || !isCaptureTrack())
        return;

    Document* document = this->document();
    if (!document || !document->page())
        return;

    m_private->setMuted(document->page()->isMediaCaptureMuted());
}

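// Reports the track's capture state as MediaProducer flags (active, muted or interrupted audio,
// video or display capture). Ended tracks and non-capture tracks report IsNotPlaying.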
MediaProducer::MediaStateFlags MediaStreamTrack::mediaState() const
{
    if (m_ended || !isCaptureTrack())
        return IsNotPlaying;

    Document* document = this->document();
    if (!document || !document->page())
        return IsNotPlaying;

    bool pageCaptureMuted = document->page()->isMediaCaptureMuted();

    if (source().type() == RealtimeMediaSource::Type::Audio) {
        if (source().interrupted() && !pageCaptureMuted)
            return HasInterruptedAudioCaptureDevice;
        if (muted())
            return HasMutedAudioCaptureDevice;
        if (m_private->isProducingData())
            return HasActiveAudioCaptureDevice;
    } else {
        auto deviceType = source().deviceType();
        ASSERT(deviceType == CaptureDevice::DeviceType::Camera || deviceType == CaptureDevice::DeviceType::Screen || deviceType == CaptureDevice::DeviceType::Window);
        if (source().interrupted() && !pageCaptureMuted)
            return deviceType == CaptureDevice::DeviceType::Camera ? HasInterruptedVideoCaptureDevice : HasInterruptedDisplayCaptureDevice;
        if (muted())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasMutedVideoCaptureDevice : HasMutedDisplayCaptureDevice;
        if (m_private->isProducingData())
            return deviceType == CaptureDevice::DeviceType::Camera ? HasActiveVideoCaptureDevice : HasActiveDisplayCaptureDevice;
    }

    return IsNotPlaying;
}

void MediaStreamTrack::trackStarted(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}

void MediaStreamTrack::trackEnded(MediaStreamTrackPrivate&)
{
    // http://w3c.github.io/mediacapture-main/#life-cycle
    // When a MediaStreamTrack track ends for any reason other than the stop() method being invoked, the User Agent must queue a task that runs the following steps:
    // 1. If the track's readyState attribute has the value ended already, then abort these steps.
    if (m_ended)
        return;

    // 2. Set track's readyState attribute to ended.
    m_ended = true;

    if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped())
        return;

    // 3. Notify track's source that track is ended so that the source may be stopped, unless other MediaStreamTrack objects depend on it.
    // 4. Fire a simple event named ended at the object.
    dispatchEvent(Event::create(eventNames().endedEvent, Event::CanBubble::No, Event::IsCancelable::No));

    for (auto& observer : m_observers)
        observer->trackDidEnd();

    configureTrackRendering();
}

void MediaStreamTrack::trackMutedChanged(MediaStreamTrackPrivate&)
{
    if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped() || m_ended)
        return;

    AtomicString eventType = muted() ? eventNames().muteEvent : eventNames().unmuteEvent;
    dispatchEvent(Event::create(eventType, Event::CanBubble::No, Event::IsCancelable::No));

    configureTrackRendering();
}

void MediaStreamTrack::trackSettingsChanged(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}

void MediaStreamTrack::trackEnabledChanged(MediaStreamTrackPrivate&)
{
    configureTrackRendering();
}

void MediaStreamTrack::configureTrackRendering()
{
    m_taskQueue.enqueueTask([this] {
        if (auto document = this->document())
            document->updateIsPlayingMedia();
    });

    // 4.3.1
    // ... media from the source only flows when a MediaStreamTrack object is both unmuted and enabled
}

void MediaStreamTrack::stop()
{
    stopTrack();
    m_taskQueue.close();
}

const char* MediaStreamTrack::activeDOMObjectName() const
{
    return "MediaStreamTrack";
}

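// A live track reports pending activity, which prevents it from being suspended along with the
// document; it becomes suspendable once it has ended.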
bool MediaStreamTrack::canSuspendForDocumentSuspension() const
{
    return !hasPendingActivity();
}

bool MediaStreamTrack::hasPendingActivity() const
{
    return !m_ended;
}

AudioSourceProvider* MediaStreamTrack::audioSourceProvider()
{
    return m_private->audioSourceProvider();
}

Document* MediaStreamTrack::document() const
{
    return downcast<Document>(scriptExecutionContext());
}

} // namespace WebCore

#endif // ENABLE(MEDIA_STREAM)