feat: add {stream} opt to custom protocol registry to configure media player (#22955)

Paul Frazee 2020-06-08 11:49:36 -05:00 committed by GitHub
Parent 261f385b5e
Commit c6c022dc46
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
15 changed files with 719 additions and 4 deletions

View file

@@ -348,6 +348,7 @@ source_set("electron_lib") {
"//device/bluetooth",
"//device/bluetooth/public/cpp",
"//gin",
"//media/blink:blink",
"//media/capture/mojom:video_capture",
"//media/mojo/mojom",
"//net:extras",

View file

@@ -63,8 +63,8 @@ The `protocol` module has the following methods:
module gets emitted and can be called only once.
Registers the `scheme` as standard, secure, bypasses content security policy for
resources, allows registering ServiceWorker and supports fetch API. Specify a
privilege with the value of `true` to enable the capability.
resources, allows registering ServiceWorker, supports fetch API, and streaming
video/audio. Specify a privilege with the value of `true` to enable the capability.
An example of registering a privileged scheme, that bypasses Content Security
Policy:
@@ -103,6 +103,11 @@ cookies) are disabled for non standard schemes. So in general if you want to
register a custom protocol to replace the `http` protocol, you have to register
it as a standard scheme.
Protocols that use streams (http and stream protocols) should set `stream: true`.
The `<video>` and `<audio>` HTML elements expect protocols to buffer their
responses by default. The `stream` flag configures those elements to correctly
expect streaming responses.
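
As an illustrative aside (not part of this diff), a minimal sketch of a main-process setup that uses the `stream` privilege; the scheme name `mystream` and the file path are placeholders:

```javascript
const { app, protocol } = require('electron')
const fs = require('fs')

// Privileges must be registered before the app's 'ready' event fires.
protocol.registerSchemesAsPrivileged([
  { scheme: 'mystream', privileges: { standard: true, secure: true, stream: true } }
])

app.whenReady().then(() => {
  protocol.registerStreamProtocol('mystream', (request, callback) => {
    // With `stream: true`, <video>/<audio> treat this response as streaming
    // rather than waiting for a fully buffered body.
    callback({
      statusCode: 200,
      headers: { 'Content-Type': 'video/webm' },
      data: fs.createReadStream('/path/to/video.webm')
    })
  })
})
```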
### `protocol.registerFileProtocol(scheme, handler)`
* `scheme` String

View file

@@ -8,3 +8,4 @@
* `allowServiceWorkers` Boolean (optional) - Default false.
* `supportFetchAPI` Boolean (optional) - Default false.
* `corsEnabled` Boolean (optional) - Default false.
* `stream` Boolean (optional) - Default false.
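
As an editorial illustration (not part of this diff), a sketch of how these flags are passed to `protocol.registerSchemesAsPrivileged`; the scheme name is arbitrary, and any flag left out keeps its default of `false`:

```javascript
const { protocol } = require('electron')

// Call once, before the app's 'ready' event.
protocol.registerSchemesAsPrivileged([
  {
    scheme: 'app',
    privileges: {
      standard: true,
      secure: true,
      supportFetchAPI: true,
      stream: true // required if <video>/<audio> will load media over this scheme
    }
  }
])
```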

View file

@@ -99,3 +99,4 @@ disable_unnecessary_ischromefirstrun_check.patch
disable_dcheck_that_fails_with_software_compositing.patch
fix_swap_global_proxies_before_initializing_the_windows_proxies.patch
fix_default_to_ntlm_v2_in_network_service.patch
feat_add_streaming-protocol_registry_to_multibuffer_data_source.patch

View file

@@ -0,0 +1,82 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Paul Frazee <pfrazee@gmail.com>
Date: Sat, 6 Jun 2020 10:30:45 -0500
Subject: feat: add streaming-protocol registry to multibuffer_data_source
blink::WebMediaPlayerImpl - which provides the <video> and <audio> behaviors - needs to know
whether a data source will stream or fully buffer the response. It determines this behavior
with MultibufferDataSource::AssumeFullyBuffered() which has http/s hardwired. An incorrect
determination will cause the video/audio to fail playing.
This patch adds a list of "streaming protocols" to the MultibufferDataSource in order to allow
other protocols to register their streaming behavior. MultibufferDataSource::AssumeFullyBuffered()
then refers to the list so that it can correctly determine the data source's settings.
diff --git a/media/blink/multibuffer_data_source.cc b/media/blink/multibuffer_data_source.cc
index 0f6ae1fb8b4ff9f24ce3f407b7359e016fc6de5f..947812e1d877ad0c9434ea958598dd9a38227d46 100644
--- a/media/blink/multibuffer_data_source.cc
+++ b/media/blink/multibuffer_data_source.cc
@@ -10,9 +10,11 @@
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/macros.h"
+#include "base/no_destructor.h"
#include "base/numerics/ranges.h"
#include "base/numerics/safe_conversions.h"
#include "base/single_thread_task_runner.h"
+#include "base/strings/string_util.h"
#include "media/base/media_log.h"
#include "media/blink/buffered_data_source_host_impl.h"
#include "media/blink/multibuffer_reader.h"
@@ -65,10 +67,22 @@ const int kUpdateBufferSizeFrequency = 32;
// How long to we delay a seek after a read?
constexpr base::TimeDelta kSeekDelay = base::TimeDelta::FromMilliseconds(20);
+std::vector<std::string>* GetStreamingSchemes() {
+ static base::NoDestructor<std::vector<std::string>> streaming_schemes({
+ url::kHttpsScheme,
+ url::kHttpScheme
+ });
+ return streaming_schemes.get();
+}
+
} // namespace
namespace media {
+void AddStreamingScheme(const char* new_scheme) {
+ GetStreamingSchemes()->push_back(new_scheme);
+}
+
class MultibufferDataSource::ReadOperation {
public:
ReadOperation(int64_t position,
@@ -158,7 +172,14 @@ bool MultibufferDataSource::media_has_played() const {
bool MultibufferDataSource::AssumeFullyBuffered() const {
DCHECK(url_data_);
- return !url_data_->url().SchemeIsHTTPOrHTTPS();
+
+ const std::string scheme = url_data_->url().scheme();
+ for (const std::string& streaming_scheme : *GetStreamingSchemes()) {
+ if (base::LowerCaseEqualsASCII(scheme, streaming_scheme)) {
+ return false;
+ }
+ }
+ return true;
}
void MultibufferDataSource::SetReader(MultiBufferReader* reader) {
diff --git a/media/blink/multibuffer_data_source.h b/media/blink/multibuffer_data_source.h
index 3da5a7bba5e7cc0f54998a81649f4dd9d78aa7be..938ae6ebc92315b3a75019c3bc8c9058106f7695 100644
--- a/media/blink/multibuffer_data_source.h
+++ b/media/blink/multibuffer_data_source.h
@@ -30,6 +30,8 @@ class BufferedDataSourceHost;
class MediaLog;
class MultiBufferReader;
+void MEDIA_BLINK_EXPORT AddStreamingScheme(const char* new_scheme);
+
// A data source capable of loading URLs and buffering the data using an
// in-memory sliding window.
//

View file

@@ -28,6 +28,9 @@ namespace {
// List of registered custom standard schemes.
std::vector<std::string> g_standard_schemes;
// List of registered custom streaming schemes.
std::vector<std::string> g_streaming_schemes;
struct SchemeOptions {
bool standard = false;
bool secure = false;
@@ -35,6 +38,7 @@ struct SchemeOptions {
bool allowServiceWorkers = false;
bool supportFetchAPI = false;
bool corsEnabled = false;
bool stream = false;
};
struct CustomScheme {
@@ -66,6 +70,7 @@ struct Converter<CustomScheme> {
opt.Get("allowServiceWorkers", &(out->options.allowServiceWorkers));
opt.Get("supportFetchAPI", &(out->options.supportFetchAPI));
opt.Get("corsEnabled", &(out->options.corsEnabled));
opt.Get("stream", &(out->options.stream));
}
return true;
}
@@ -119,6 +124,9 @@ void RegisterSchemesAsPrivileged(gin_helper::ErrorThrower thrower,
if (custom_scheme.options.allowServiceWorkers) {
service_worker_schemes.push_back(custom_scheme.scheme);
}
if (custom_scheme.options.stream) {
g_streaming_schemes.push_back(custom_scheme.scheme);
}
}
const auto AppendSchemesToCmdLine = [](const char* switch_name,
@@ -138,6 +146,8 @@ void RegisterSchemesAsPrivileged(gin_helper::ErrorThrower thrower,
service_worker_schemes);
AppendSchemesToCmdLine(electron::switches::kStandardSchemes,
g_standard_schemes);
AppendSchemesToCmdLine(electron::switches::kStreamingSchemes,
g_streaming_schemes);
}
namespace {

View file

@@ -744,7 +744,8 @@ void ElectronBrowserClient::AppendExtraCommandLineSwitches(
switches::kStandardSchemes, switches::kEnableSandbox,
switches::kSecureSchemes, switches::kBypassCSPSchemes,
switches::kCORSSchemes, switches::kFetchSchemes,
switches::kServiceWorkerSchemes, switches::kEnableApiFilteringLogging};
switches::kServiceWorkerSchemes, switches::kEnableApiFilteringLogging,
switches::kStreamingSchemes};
command_line->CopySwitchesFrom(*base::CommandLine::ForCurrentProcess(),
kCommonSwitchNames,
base::size(kCommonSwitchNames));

View file

@@ -221,6 +221,9 @@ const char kFetchSchemes[] = "fetch-schemes";
// Register schemes as CORS enabled.
const char kCORSSchemes[] = "cors-schemes";
// Register schemes as streaming responses.
const char kStreamingSchemes[] = "streaming-schemes";
// The browser process app model ID
const char kAppUserModelId[] = "app-user-model-id";

View file

@@ -111,6 +111,7 @@ extern const char kSecureSchemes[];
extern const char kBypassCSPSchemes[];
extern const char kFetchSchemes[];
extern const char kCORSSchemes[];
extern const char kStreamingSchemes[];
extern const char kAppUserModelId[];
extern const char kAppPath[];
extern const char kEnableApiFilteringLogging[];

View file

@@ -20,6 +20,7 @@
#include "content/public/renderer/render_thread.h"
#include "content/public/renderer/render_view.h"
#include "electron/buildflags/buildflags.h"
#include "media/blink/multibuffer_data_source.h"
#include "printing/buildflags/buildflags.h"
#include "shell/common/color_util.h"
#include "shell/common/gin_helper/dictionary.h"
@@ -107,6 +108,11 @@ RendererClientBase::RendererClientBase() {
ParseSchemesCLISwitch(command_line, switches::kCORSSchemes);
for (const std::string& scheme : cors_schemes_list)
url::AddCorsEnabledScheme(scheme.c_str());
// Parse --streaming-schemes=scheme1,scheme2
std::vector<std::string> streaming_schemes_list =
ParseSchemesCLISwitch(command_line, switches::kStreamingSchemes);
for (const std::string& scheme : streaming_schemes_list)
media::AddStreamingScheme(scheme.c_str());
isolated_world_ = base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kContextIsolation);
// We rely on the unique process host id which is notified to the

View file

@@ -9,6 +9,7 @@ import * as qs from 'querystring';
import * as stream from 'stream';
import { closeWindow } from './window-helpers';
import { emittedOnce } from './events-helpers';
import { WebmGenerator } from './video-helpers';
const fixturesPath = path.resolve(__dirname, '..', 'spec', 'fixtures');
@@ -822,4 +823,99 @@ describe('protocol module', () => {
}
}
});
describe('protocol.registerSchemesAsPrivileged stream', async function () {
const pagePath = path.join(fixturesPath, 'pages', 'video.html');
const videoSourceImagePath = path.join(fixturesPath, 'video-source-image.webp');
const videoPath = path.join(fixturesPath, 'video.webm');
const standardScheme = (global as any).standardScheme;
let w: BrowserWindow = null as unknown as BrowserWindow;
before(async () => {
// generate test video
const imageBase64 = await fs.promises.readFile(videoSourceImagePath, 'base64');
const imageDataUrl = `data:image/webp;base64,${imageBase64}`;
const encoder = new WebmGenerator(15);
for (let i = 0; i < 30; i++) {
encoder.add(imageDataUrl);
}
await new Promise((resolve, reject) => {
encoder.compile((output:Uint8Array) => {
fs.promises.writeFile(videoPath, output).then(resolve, reject);
});
});
});
after(async () => {
await fs.promises.unlink(videoPath);
});
beforeEach(async () => {
w = new BrowserWindow({ show: false });
});
afterEach(async () => {
await closeWindow(w);
w = null as unknown as BrowserWindow;
await protocol.unregisterProtocol(standardScheme);
await protocol.unregisterProtocol('stream');
});
it('does not successfully play videos with stream: false on streaming protocols', async () => {
await streamsResponses(standardScheme, 'error');
});
it('successfully plays videos with stream: true on streaming protocols', async () => {
await streamsResponses('stream', 'play');
});
async function streamsResponses (testingScheme: string, expected: any) {
const protocolHandler = (request: any, callback: Function) => {
if (request.url.includes('/video.webm')) {
const stat = fs.statSync(videoPath);
const fileSize = stat.size;
const range = request.headers.Range;
if (range) {
const parts = range.replace(/bytes=/, '').split('-');
const start = parseInt(parts[0], 10);
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
const chunksize = (end - start) + 1;
const headers = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': String(chunksize),
'Content-Type': 'video/webm'
};
callback({ statusCode: 206, headers, data: fs.createReadStream(videoPath, { start, end }) });
} else {
callback({
statusCode: 200,
headers: {
'Content-Length': String(fileSize),
'Content-Type': 'video/webm'
},
data: fs.createReadStream(videoPath)
});
}
} else {
callback({ data: fs.createReadStream(pagePath), headers: { 'Content-Type': 'text/html' }, statusCode: 200 });
}
};
await registerStreamProtocol(standardScheme, protocolHandler);
await registerStreamProtocol('stream', protocolHandler);
const newContents: WebContents = (webContents as any).create({ nodeIntegration: true });
try {
newContents.loadURL(testingScheme + '://fake-host');
const [, response] = await emittedOnce(ipcMain, 'result');
expect(response).to.deep.equal(expected);
} finally {
// This is called in a timeout to avoid a crash that happens when
// calling destroy() in a microtask.
setTimeout(() => {
(newContents as any).destroy();
});
}
}
});
});

View file

@@ -27,12 +27,13 @@ app.commandLine.appendSwitch('use-fake-device-for-media-stream');
global.standardScheme = 'app';
global.zoomScheme = 'zoom';
protocol.registerSchemesAsPrivileged([
{ scheme: global.standardScheme, privileges: { standard: true, secure: true } },
{ scheme: global.standardScheme, privileges: { standard: true, secure: true, stream: false } },
{ scheme: global.zoomScheme, privileges: { standard: true, secure: true } },
{ scheme: 'cors-blob', privileges: { corsEnabled: true, supportFetchAPI: true } },
{ scheme: 'cors', privileges: { corsEnabled: true, supportFetchAPI: true } },
{ scheme: 'no-cors', privileges: { supportFetchAPI: true } },
{ scheme: 'no-fetch', privileges: { corsEnabled: true } },
{ scheme: 'stream', privileges: { standard: true, stream: true } },
{ scheme: 'foo', privileges: { standard: true } },
{ scheme: 'bar', privileges: { standard: true } }
]);

spec-main/video-helpers.js (493 additions) Normal file
View file

@@ -0,0 +1,493 @@
/*
https://github.com/antimatter15/whammy
The MIT License (MIT)
Copyright (c) 2015 Kevin Kwok
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
function atob (str) {
return Buffer.from(str, 'base64').toString('binary');
}
// in this case, frames has a very specific meaning, which will be
// detailed once i finish writing the code
function ToWebM (frames, outputAsArray) {
const info = checkFrames(frames);
// max duration by cluster in milliseconds
const CLUSTER_MAX_DURATION = 30000;
const EBML = [
{
'id': 0x1a45dfa3, // EBML
'data': [
{
'data': 1,
'id': 0x4286 // EBMLVersion
},
{
'data': 1,
'id': 0x42f7 // EBMLReadVersion
},
{
'data': 4,
'id': 0x42f2 // EBMLMaxIDLength
},
{
'data': 8,
'id': 0x42f3 // EBMLMaxSizeLength
},
{
'data': 'webm',
'id': 0x4282 // DocType
},
{
'data': 2,
'id': 0x4287 // DocTypeVersion
},
{
'data': 2,
'id': 0x4285 // DocTypeReadVersion
}
]
},
{
'id': 0x18538067, // Segment
'data': [
{
'id': 0x1549a966, // Info
'data': [
{
'data': 1e6, // do things in millisecs (num of nanosecs for duration scale)
'id': 0x2ad7b1 // TimecodeScale
},
{
'data': 'whammy',
'id': 0x4d80 // MuxingApp
},
{
'data': 'whammy',
'id': 0x5741 // WritingApp
},
{
'data': doubleToString(info.duration),
'id': 0x4489 // Duration
}
]
},
{
'id': 0x1654ae6b, // Tracks
'data': [
{
'id': 0xae, // TrackEntry
'data': [
{
'data': 1,
'id': 0xd7 // TrackNumber
},
{
'data': 1,
'id': 0x73c5 // TrackUID
},
{
'data': 0,
'id': 0x9c // FlagLacing
},
{
'data': 'und',
'id': 0x22b59c // Language
},
{
'data': 'V_VP8',
'id': 0x86 // CodecID
},
{
'data': 'VP8',
'id': 0x258688 // CodecName
},
{
'data': 1,
'id': 0x83 // TrackType
},
{
'id': 0xe0, // Video
'data': [
{
'data': info.width,
'id': 0xb0 // PixelWidth
},
{
'data': info.height,
'id': 0xba // PixelHeight
}
]
}
]
}
]
},
{
'id': 0x1c53bb6b, // Cues
'data': [
// cue insertion point
]
}
// cluster insertion point
]
}
];
const segment = EBML[1];
const cues = segment.data[2];
// Generate clusters (max duration)
let frameNumber = 0;
let clusterTimecode = 0;
while (frameNumber < frames.length) {
const cuePoint = {
'id': 0xbb, // CuePoint
'data': [
{
'data': Math.round(clusterTimecode),
'id': 0xb3 // CueTime
},
{
'id': 0xb7, // CueTrackPositions
'data': [
{
'data': 1,
'id': 0xf7 // CueTrack
},
{
'data': 0, // to be filled in when we know it
'size': 8,
'id': 0xf1 // CueClusterPosition
}
]
}
]
};
cues.data.push(cuePoint);
const clusterFrames = [];
let clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < CLUSTER_MAX_DURATION);
let clusterCounter = 0;
const cluster = {
'id': 0x1f43b675, // Cluster
'data': [
{
'data': Math.round(clusterTimecode),
'id': 0xe7 // Timecode
}
].concat(clusterFrames.map(function (webp) {
const block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}))
};
// Add cluster to segment
segment.data.push(cluster);
clusterTimecode += clusterDuration;
}
// First pass to compute cluster positions
let position = 0;
for (let i = 0; i < segment.data.length; i++) {
if (i >= 3) {
cues.data[i - 3].data[1].data[1].data = position;
}
const data = generateEBML([segment.data[i]], outputAsArray);
position += data.size || data.byteLength || data.length;
if (i !== 2) { // not cues
// Save results to avoid having to encode everything twice
segment.data[i] = data;
}
}
return generateEBML(EBML, outputAsArray);
}
// sums the lengths of all the frames and gets the duration, woo
function checkFrames (frames) {
const width = frames[0].width;
const height = frames[0].height;
let duration = frames[0].duration;
for (let i = 1; i < frames.length; i++) {
if (frames[i].width !== width) throw new Error('Frame ' + (i + 1) + ' has a different width');
if (frames[i].height !== height) throw new Error('Frame ' + (i + 1) + ' has a different height');
if (frames[i].duration < 0 || frames[i].duration > 0x7fff) throw new Error('Frame ' + (i + 1) + ' has a weird duration (must be between 0 and 32767)');
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
function numToBuffer (num) {
const parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
function numToFixedBuffer (num, size) {
const parts = new Uint8Array(size);
for (let i = size - 1; i >= 0; i--) {
parts[i] = num & 0xff;
num = num >> 8;
}
return parts;
}
function strToBuffer (str) {
// return new Blob([str]);
const arr = new Uint8Array(str.length);
for (let i = 0; i < str.length; i++) {
arr[i] = str.charCodeAt(i);
}
return arr;
// this is slower
// return new Uint8Array(str.split('').map(function(e){
// return e.charCodeAt(0)
// }))
}
// sorry this is ugly, and sort of hard to understand exactly why this was done
// at all really, but the reason is that there's some code below that i dont really
// feel like understanding, and this is easier than using my brain.
function bitsToBuffer (bits) {
const data = [];
const pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (let i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
function generateEBML (json) {
const ebml = [];
for (let i = 0; i < json.length; i++) {
if (!('id' in json[i])) {
// already encoded blob or byteArray
ebml.push(json[i]);
continue;
}
let data = json[i].data;
if (typeof data === 'object') data = generateEBML(data);
if (typeof data === 'number') data = ('size' in json[i]) ? numToFixedBuffer(data, json[i].size) : bitsToBuffer(data.toString(2));
if (typeof data === 'string') data = strToBuffer(data);
const len = data.size || data.byteLength || data.length;
const zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
const sizeStr = len.toString(2);
const padded = (new Array((zeroes * 7 + 7 + 1) - sizeStr.length)).join('0') + sizeStr;
const size = (new Array(zeroes)).join('0') + '1' + padded;
// i actually dont quite understand what went on up there, so I'm not really
// going to fix this, i'm probably just going to write some hacky thing which
// converts that string into a buffer-esque thing
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
// convert ebml to an array
const buffer = toFlatArray(ebml);
return new Uint8Array(buffer);
}
function toFlatArray (arr, outBuffer) {
if (outBuffer == null) {
outBuffer = [];
}
for (let i = 0; i < arr.length; i++) {
if (typeof arr[i] === 'object') {
// an array
toFlatArray(arr[i], outBuffer);
} else {
// a simple element
outBuffer.push(arr[i]);
}
}
return outBuffer;
}
function makeSimpleBlock (data) {
let flags = 0;
if (data.keyframe) flags |= 128;
if (data.invisible) flags |= 8;
if (data.lacing) flags |= (data.lacing << 1);
if (data.discardable) flags |= 1;
if (data.trackNum > 127) {
throw new Error('TrackNumber > 127 not supported');
}
const out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function (e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
// here's something else taken verbatim from weppy, awesome rite?
function parseWebP (riff) {
const VP8 = riff.RIFF[0].WEBP[0];
const frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
const c = [];
for (let i = 0; i < 4; i++) c[i] = VP8.charCodeAt(frameStart + 3 + i);
// the code below is literally copied verbatim from the bitstream spec
let tmp = (c[1] << 8) | c[0];
const width = tmp & 0x3FFF;
const horizontalScale = tmp >> 14;
tmp = (c[3] << 8) | c[2];
const height = tmp & 0x3FFF;
const verticalScale = tmp >> 14;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
// i think i'm going off on a riff by pretending this is some known
// idiom which i'm making a casual and brilliant pun about, but since
// i can't find anything on google which conforms to this idiomatic
// usage, I'm assuming this is just a consequence of some psychotic
// break which makes me make up puns. well, enough riff-raff (aha a
// rescue of sorts), this function was ripped wholesale from weppy
function parseRIFF (string) {
let offset = 0;
const chunks = {};
while (offset < string.length) {
const id = string.substr(offset, 4);
chunks[id] = chunks[id] || [];
if (id === 'RIFF' || id === 'LIST') {
const len = parseInt(string.substr(offset + 4, 4).split('').map(function (i) {
const unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
const data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id].push(parseRIFF(data));
} else if (id === 'WEBP') {
// Use (offset + 8) to skip past "VP8 "/"VP8L"/"VP8X" field after "WEBP"
chunks[id].push(string.substr(offset + 8));
offset = string.length;
} else {
// Unknown chunk type; push entire payload
chunks[id].push(string.substr(offset + 4));
offset = string.length;
}
}
return chunks;
}
// here's a little utility function that acts as a utility for other functions
// basically, the only purpose is for encoding "Duration", which is encoded as
// a double (considerably more difficult to encode than an integer)
function doubleToString (num) {
return [].slice.call(
new Uint8Array(
(
new Float64Array([num]) // create a float64 array
).buffer) // extract the array buffer
, 0) // convert the Uint8Array into a regular array
.map(function (e) { // since it's a regular array, we can now use map
return String.fromCharCode(e); // encode all the bytes individually
})
.reverse() // correct the byte endianness (assume it's little endian for now)
.join(''); // join the bytes in holy matrimony as a string
}
function WhammyVideo (speed, quality) { // a more abstract-ish API
this.frames = [];
this.duration = 1000 / speed;
this.quality = quality || 0.8;
}
WhammyVideo.prototype.add = function (frame, duration) {
if (typeof duration !== 'undefined' && this.duration) throw new Error("you can't pass a duration if the fps is set");
if (typeof duration === 'undefined' && !this.duration) throw new Error("if you don't have the fps set, you need to have durations here.");
if (frame.canvas) { // CanvasRenderingContext2D
frame = frame.canvas;
}
if (frame.toDataURL) {
// frame = frame.toDataURL('image/webp', this.quality);
// quickly store image data so we don't block cpu. encode in compile method.
frame = frame.getContext('2d').getImageData(0, 0, frame.width, frame.height);
} else if (typeof frame !== 'string') {
throw new Error('frame must be a a HTMLCanvasElement, a CanvasRenderingContext2D or a DataURI formatted string');
}
if (typeof frame === 'string' && !(/^data:image\/webp;base64,/ig).test(frame)) {
throw new Error('Input must be formatted properly as a base64 encoded DataURI of type image/webp');
}
this.frames.push({
image: frame,
duration: duration || this.duration
});
};
WhammyVideo.prototype.compile = function (callback) {
const webm = new ToWebM(this.frames.map(function (frame) {
const webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
callback(webm);
};
export const WebmGenerator = WhammyVideo;

spec/fixtures/pages/video.html (14 additions) vendored Normal file
View file

@@ -0,0 +1,14 @@
<html>
<body>
<video id="videoPlayer" src="/video.webm" autoplay muted></video>
<script>
const { ipcRenderer } = require('electron');
videoPlayer.addEventListener('play', e => {
ipcRenderer.send('result', 'play');
});
videoPlayer.addEventListener('error', e => {
ipcRenderer.send('result', 'error');
});
</script>
</body>
</html>

Binary data
spec/fixtures/video-source-image.webp vendored Normal file

Binary file not shown.

After  |  Size: 62 KiB