/*! jsmpeg v1.0 | (c) Dominic Szablewski | MIT license */
// This sets up the JSMpeg "Namespace". The object is empty apart from the
// Now() and Fill() utility functions and the automatic CreateVideoElements()
// after DOMReady.
var JSMpeg = {
	// The Player sets up the connections between source, demuxer, decoders,
	// renderer and audio output. It ties everything together, is responsible
	// for scheduling decoding and provides some convenience methods for
	// external users.
	Player: null,
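	// A minimal usage sketch (illustrative only, not part of this file): a
	// Player is typically constructed with a URL and an options object. The
	// option and method names below are assumptions based on the description
	// above and may differ per build.
	//
	//   var player = new JSMpeg.Player('example.ts', {
	//     canvas: document.querySelector('canvas'), // render target
	//     autoplay: true,                           // assumed option name
	//     loop: true                                // assumed option name
	//   });
	//   player.play();   // start decoding and playback
	//   player.pause();  // pause playback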
	// A Video Element wraps the Player, shows HTML controls to start/pause
	// the video and handles Audio unlocking on iOS. VideoElements can be
	// created directly in HTML using the <div class="jsmpeg"/> tag.
	VideoElement: null,
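	// An illustrative HTML sketch (not part of this file); the data-url
	// attribute name is an assumption about how the element finds its source:
	//
	//   <div class="jsmpeg" data-url="example.ts"></div>
	//
	// CreateVideoElements() below picks up such elements on DOMContentLoaded
	// and wraps each one in a JSMpeg.VideoElement.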
	// The BitBuffer wraps a Uint8Array and allows reading an arbitrary number
	// of bits at a time. On writing, the BitBuffer either expands its
	// internal buffer (for static files) or deletes old data (for streaming).
	BitBuffer: null,
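	// An illustrative reading sketch; the method names are assumptions based
	// on the description above, not a verified API:
	//
	//   var bits = new JSMpeg.BitBuffer(1024);            // assumed constructor
	//   bits.write([new Uint8Array([0xAB, 0xCD])]);       // assumed write signature
	//   var first4 = bits.read(4); // would read the 4 most significant bits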
	// A Source provides raw data from HTTP, a WebSocket connection or any
	// other means. Sources must support the following API:
	// .connect(destinationNode)
	// .write(buffer)
	// .start() - start reading
	// .resume(headroom) - continue reading; headroom to play pos in seconds
	// .established - boolean, true after connection is established
	// .completed - boolean, true if the source is completely loaded
	// .progress - float 0-1
	Source: {},
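	// A minimal Source skeleton implementing the API listed above
	// (illustrative sketch only; the real HTTP and WebSocket sources are
	// defined in separate files):
	//
	//   var NullSource = function() {
	//     this.established = false;
	//     this.completed = false;
	//     this.progress = 0;
	//     this.destination = null;
	//   };
	//   NullSource.prototype.connect = function(destination) {
	//     this.destination = destination;
	//   };
	//   NullSource.prototype.start = function() {
	//     this.established = true;
	//     this.completed = true;
	//     this.progress = 1;
	//   };
	//   NullSource.prototype.resume = function(headroom) {};
	//   NullSource.prototype.write = function(buffer) {
	//     if (this.destination) { this.destination.write(buffer); }
	//   };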
	// A Demuxer may sit between a Source and a Decoder. It separates the
	// incoming raw data into Video, Audio and other Streams. API:
	// .connect(streamId, destinationNode)
	// .write(buffer)
	// .currentTime - float, in seconds
	// .startTime - float, in seconds
	Demuxer: {},
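	// An illustrative wiring sketch (names are placeholders; the concrete
	// demuxer, e.g. an MPEG-TS demuxer, is defined elsewhere):
	//
	//   var demuxer = new SomeDemuxer();                  // hypothetical constructor
	//   source.connect(demuxer);                          // source feeds raw bytes in
	//   demuxer.connect(VIDEO_STREAM_ID, videoDecoder);   // hypothetical stream ids
	//   demuxer.connect(AUDIO_STREAM_ID, audioDecoder);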
	// A Decoder accepts an incoming Stream of raw Audio or Video data, buffers
	// it and upon `.decode()` decodes a single frame of data. Video decoders
	// call `destinationNode.render(Y, Cr, Cb)` with the decoded pixel data;
	// Audio decoders call `destinationNode.play(left, right)` with the decoded
	// PCM data. API:
	// .connect(destinationNode)
	// .write(pts, buffer)
	// .decode()
	// .seek(time)
	// .currentTime - float, in seconds
	// .startTime - float, in seconds
	Decoder: {},
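	// An illustrative decode-loop sketch (the actual scheduling lives in the
	// Player; the names here are placeholders):
	//
	//   videoDecoder.connect(renderer);    // decoded frames go to a Renderer
	//   audioDecoder.connect(audioOutput); // decoded samples go to an AudioOutput
	//   demuxer.write(chunk);              // hypothetical incoming raw data
	//   videoDecoder.decode();             // renders one frame if enough data is buffered
	//   audioDecoder.decode();             // plays one chunk of PCM if enough data is buffered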
	// A Renderer accepts raw YCrCb data in 3 separate buffers via the render()
	// method. Renderers typically convert the data into the RGBA color space
	// and draw it on a Canvas, but other output - such as writing PNGs - would
	// be conceivable. API:
	// .render(y, cr, cb) - pixel data as Uint8Arrays
	// .enabled - whether the renderer does anything upon receiving data
	Renderer: {},
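	// A minimal Renderer skeleton implementing the API listed above
	// (illustrative sketch; the real WebGL/Canvas renderers are defined in
	// separate files):
	//
	//   var LogRenderer = function() {
	//     this.enabled = true;
	//   };
	//   LogRenderer.prototype.render = function(y, cr, cb) {
	//     if (!this.enabled) { return; }
	//     // y, cr and cb hold the planes of one decoded frame
	//     console.log('frame received, luma bytes:', y.length);
	//   };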
	// Audio Outputs accept raw Stereo PCM data in 2 separate buffers via the
	// play() method. Outputs typically play the audio on the user's device.
	// API:
	// .play(sampleRate, left, right) - rate in Hz; PCM data as Uint8Arrays
	// .stop()
	// .enqueuedTime - float, in seconds
	// .enabled - whether the output does anything upon receiving data
	AudioOutput: {},
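	// A minimal AudioOutput skeleton implementing the API listed above
	// (illustrative sketch; the real WebAudio output is defined in a
	// separate file):
	//
	//   var NullAudioOutput = function() {
	//     this.enabled = true;
	//     this.enqueuedTime = 0;
	//   };
	//   NullAudioOutput.prototype.play = function(sampleRate, left, right) {
	//     if (!this.enabled) { return; }
	//     // left/right each hold one decoded chunk of PCM samples
	//     this.enqueuedTime += left.length / sampleRate;
	//   };
	//   NullAudioOutput.prototype.stop = function() {
	//     this.enqueuedTime = 0;
	//   };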
	Now: function() {
		return window.performance
			? window.performance.now() / 1000
			: Date.now() / 1000;
	},

	CreateVideoElements: function() {
		var elements = document.querySelectorAll('.jsmpeg');
		for (var i = 0; i < elements.length; i++) {
			new JSMpeg.VideoElement(elements[i]);
		}
	},

	Fill: function(array, value) {
		if (array.fill) {
			array.fill(value);
		}
		else {
			for (var i = 0; i < array.length; i++) {
				array[i] = value;
			}
		}
	}
};
// Automatically create players for all found <div class="jsmpeg"/> elements.
if (document.readyState === 'complete') {
	JSMpeg.CreateVideoElements();
}
else {
	document.addEventListener('DOMContentLoaded', JSMpeg.CreateVideoElements);
}