From a4e01da27c08e43a67b2618ad1e71c1f8f86d5cd Mon Sep 17 00:00:00 2001
From: Biswakalyan Bhuyan
Date: Thu, 19 Sep 2024 15:33:11 +0530
Subject: youtube frontend

---
 youtube/static/js/av-merge.js         | 987 ++++++++++++++++++++++++++++++++++
 youtube/static/js/comments.js         |  20 +
 youtube/static/js/common.js           | 116 ++++
 youtube/static/js/hotkeys.js          |  61 +++
 youtube/static/js/playlistadd.js      |  86 +++
 youtube/static/js/plyr-start.js       | 121 +++++
 youtube/static/js/sponsorblock.js     |  40 ++
 youtube/static/js/transcript-table.js | 151 ++++++
 youtube/static/js/watch.js            | 199 +++++++
 9 files changed, 1781 insertions(+)
 create mode 100644 youtube/static/js/av-merge.js
 create mode 100644 youtube/static/js/comments.js
 create mode 100644 youtube/static/js/common.js
 create mode 100644 youtube/static/js/hotkeys.js
 create mode 100644 youtube/static/js/playlistadd.js
 create mode 100644 youtube/static/js/plyr-start.js
 create mode 100644 youtube/static/js/sponsorblock.js
 create mode 100644 youtube/static/js/transcript-table.js
 create mode 100644 youtube/static/js/watch.js
 (limited to 'youtube/static/js')

diff --git a/youtube/static/js/av-merge.js b/youtube/static/js/av-merge.js
new file mode 100644
index 0000000..e00f440
--- /dev/null
+++ b/youtube/static/js/av-merge.js
@@ -0,0 +1,987 @@
+// Heavily modified from
+// https://github.com/nickdesaulniers/netfix/issues/4#issuecomment-578856471
+// which was in turn modified from
+// https://github.com/nickdesaulniers/netfix/blob/gh-pages/demo/bufferWhenNeeded.html
+
+// Useful reading:
+// https://stackoverflow.com/questions/35177797/what-exactly-is-fragmented-mp4fmp4-how-is-it-different-from-normal-mp4
+// https://axel.isouard.fr/blog/2016/05/24/streaming-webm-video-over-html5-with-media-source
+
+// We start by parsing the sidx (segment index) table in order to get the
+// byte ranges of the segments. The byte range of the sidx table itself is
+// provided in the indexRange variable by YouTube.
+
+// Useful info, as well as segments vs sequence mode (we use segments mode):
+// https://joshuatz.com/posts/2020/appending-videos-in-javascript-with-mediasource-buffers/
+
+// SourceBuffer data limits:
+// https://developers.google.com/web/updates/2017/10/quotaexceedederror
+
+// TODO: Call abort to cancel in-progress appends?
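The comments above summarize the whole approach: fetch the sidx table from the
byte range YouTube reports (indexRange), turn it into per-segment byte ranges,
then feed segments into SourceBuffers with HTTP Range requests. A minimal,
self-contained sketch of that flow follows; the function and its parameters
(videoEl, mimeCodec, initRange, firstSegmentRange) are illustrative names, not
something defined in this file.

    async function sketchAppendFirstSegment(videoEl, url, mimeCodec, initRange, firstSegmentRange) {
        // Fetch an inclusive byte range of the media file as an ArrayBuffer
        const fetchBytes = (start, end) =>
            fetch(url, {headers: {'Range': 'bytes=' + start + '-' + end}})
                .then(res => res.arrayBuffer());

        const mediaSource = new MediaSource();
        videoEl.src = URL.createObjectURL(mediaSource);
        await new Promise(resolve =>
            mediaSource.addEventListener('sourceopen', resolve, {once: true}));

        const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
        sourceBuffer.mode = 'segments'; // timestamps come from the fragments themselves
        const appended = () => new Promise(resolve =>
            sourceBuffer.addEventListener('updateend', resolve, {once: true}));

        // Initialization segment first, then a media segment whose byte range
        // would normally be read out of the parsed sidx table.
        sourceBuffer.appendBuffer(await fetchBytes(initRange.start, initRange.end));
        await appended();
        sourceBuffer.appendBuffer(await fetchBytes(firstSegmentRange.start, firstSegmentRange.end));
        await appended();
    }

AVMerge below implements the same idea for an audio and a video SourceBuffer at
once, adding seeking, a buffering target, retries, and QuotaExceededError
recovery on top.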
+ + + +function AVMerge(video, srcInfo, startTime){ + this.audioSource = null; + this.videoSource = null; + this.avRatio = null; + this.videoStream = null; + this.audioStream = null; + this.seeking = false; + this.startTime = startTime; + this.video = video; + this.mediaSource = null; + this.closed = false; + this.opened = false; + this.audioEndOfStreamCalled = false; + this.videoEndOfStreamCalled = false; + if (!('MediaSource' in window)) { + reportError('MediaSource not supported.'); + return; + } + + // Find supported video and audio sources + for (let src of srcInfo['videos']) { + if (MediaSource.isTypeSupported(src['mime_codec'])) { + reportDebug('Using video source', src['mime_codec'], + src['quality_string'], 'itag', src['itag']); + this.videoSource = src; + break; + } + } + for (let src of srcInfo['audios']) { + if (MediaSource.isTypeSupported(src['mime_codec'])) { + reportDebug('Using audio source', src['mime_codec'], + src['quality_string'], 'itag', src['itag']); + this.audioSource = src; + break; + } + } + if (this.videoSource === null) + reportError('No supported video MIME type or codec found: ', + srcInfo['videos'].map(s => s.mime_codec).join(', ')); + if (this.audioSource === null) + reportError('No supported audio MIME type or codec found: ', + srcInfo['audios'].map(s => s.mime_codec).join(', ')); + if (this.videoSource === null || this.audioSource === null) + return; + + if (this.videoSource.bitrate && this.audioSource.bitrate) + this.avRatio = this.audioSource.bitrate/this.videoSource.bitrate; + else + this.avRatio = 1/10; + + this.setup(); +} +AVMerge.prototype.setup = function() { + this.mediaSource = new MediaSource(); + this.video.src = URL.createObjectURL(this.mediaSource); + this.mediaSource.onsourceopen = this.sourceOpen.bind(this); +} + +AVMerge.prototype.sourceOpen = function(_) { + // If after calling mediaSource.endOfStream, the user seeks back + // into the video, the sourceOpen event will be fired again. Do not + // overwrite the streams. + this.audioEndOfStreamCalled = false; + this.videoEndOfStreamCalled = false; + if (this.opened) + return; + this.opened = true; + this.videoStream = new Stream(this, this.videoSource, this.startTime, + this.avRatio); + this.audioStream = new Stream(this, this.audioSource, this.startTime, + this.avRatio); + + this.videoStream.setup(); + this.audioStream.setup(); + + this.timeUpdateEvt = addEvent(this.video, 'timeupdate', + this.checkBothBuffers.bind(this)); + this.seekingEvt = addEvent(this.video, 'seeking', + debounce(this.seek.bind(this), 500)); + //this.video.onseeked = function() {console.log('seeked')}; +} +AVMerge.prototype.close = function() { + if (this.closed) + return; + this.closed = true; + this.videoStream.close(); + this.audioStream.close(); + this.timeUpdateEvt.remove(); + this.seekingEvt.remove(); + if (this.mediaSource.readyState == 'open') + this.mediaSource.endOfStream(); +} +AVMerge.prototype.checkBothBuffers = function() { + this.audioStream.checkBuffer(); + this.videoStream.checkBuffer(); +} +AVMerge.prototype.seek = function(e) { + if (this.mediaSource.readyState === 'open') { + this.seeking = true; + this.audioStream.handleSeek(); + this.videoStream.handleSeek(); + this.seeking = false; + } else { + reportWarning('seek but not open? 
readyState:', + this.mediaSource.readyState); + } +} +AVMerge.prototype.audioEndOfStream = function() { + if (this.videoEndOfStreamCalled && !this.audioEndOfStreamCalled) { + reportDebug('Calling mediaSource.endOfStream()'); + this.mediaSource.endOfStream(); + } + this.audioEndOfStreamCalled = true; +} +AVMerge.prototype.videoEndOfStream = function() { + if (this.audioEndOfStreamCalled && !this.videoEndOfStreamCalled) { + reportDebug('Calling mediaSource.endOfStream()'); + this.mediaSource.endOfStream(); + } + this.videoEndOfStreamCalled = true; +} +AVMerge.prototype.printDebuggingInfo = function() { + reportDebug('videoSource:', this.videoSource); + reportDebug('audioSource:', this.videoSource); + reportDebug('video sidx:', this.videoStream.sidx); + reportDebug('audio sidx:', this.audioStream.sidx); + reportDebug('video updating', this.videoStream.sourceBuffer.updating); + reportDebug('audio updating', this.audioStream.sourceBuffer.updating); + reportDebug('video duration:', this.video.duration); + reportDebug('video current time:', this.video.currentTime); + reportDebug('mediaSource.readyState:', this.mediaSource.readyState); + reportDebug('videoEndOfStreamCalled', this.videoEndOfStreamCalled); + reportDebug('audioEndOfStreamCalled', this.audioEndOfStreamCalled); + for (let obj of [this.videoStream, this.audioStream]) { + reportDebug(obj.streamType, 'stream buffered times:'); + for (let i=0; i { + this.reportError('sourceBuffer error', e); + }); + this.updateendEvt = addEvent(this.sourceBuffer, 'updateend', (e) => { + if (this.appendQueue.length != 0) { + this.appendSegment(...this.appendQueue.shift()); + } + }); +} +Stream.prototype.setup = async function(){ + // Group requests together + if (this.initRange.end+1 == this.indexRange.start){ + fetchRange( + this.url, + this.initRange.start, + this.indexRange.end, + 'Initialization+index segments', + ).then( + (buffer) => { + let init_end = this.initRange.end - this.initRange.start + 1; + let index_start = this.indexRange.start - this.initRange.start; + let index_end = this.indexRange.end - this.initRange.start + 1; + this.setupInitSegment(buffer.slice(0, init_end)); + this.setupSegmentIndex(buffer.slice(index_start, index_end)); + } + ); + } else { + // initialization data + await fetchRange( + this.url, + this.initRange.start, + this.initRange.end, + 'Initialization segment', + ).then(this.setupInitSegment.bind(this)); + + // sidx (segment index) table + fetchRange( + this.url, + this.indexRange.start, + this.indexRange.end, + 'Index segment', + ).then(this.setupSegmentIndex.bind(this)); + } +} +Stream.prototype.setupInitSegment = function(initSegment) { + if (this.ext == 'webm') + this.sidx = extractWebmInitializationInfo(initSegment); + this.appendSegment(null, initSegment); +} +Stream.prototype.setupSegmentIndex = async function(indexSegment){ + if (this.ext == 'webm') { + this.sidx.entries = parseWebmCues(indexSegment, this.sidx); + if (this.fileSize) { + let lastIdx = this.sidx.entries.length - 1; + this.sidx.entries[lastIdx].end = this.fileSize - 1; + } + for (let entry of this.sidx.entries) { + entry.subSegmentDuration = entry.tickEnd - entry.tickStart + 1; + if (entry.end) + entry.referencedSize = entry.end - entry.start + 1; + } + } else { + let box = unbox(indexSegment); + this.sidx = sidx_parse(box.data, this.indexRange.end+1); + } + this.fetchSegmentIfNeeded(this.getSegmentIdx(this.startTime)); +} +Stream.prototype.close = function() { + // Prevents appendSegment adding to buffer if request finishes + // after closing + 
this.closed = true; + if (this.sourceBuffer.updating) + this.sourceBuffer.abort(); + this.mediaSource.removeSourceBuffer(this.sourceBuffer); + this.updateendEvt.remove(); +} +Stream.prototype.appendSegment = function(segmentIdx, chunk) { + if (this.closed) + return; + + this.reportDebug('Received segment', segmentIdx) + + // cannot append right now, schedule for updateend + if (this.sourceBuffer.updating) { + this.reportDebug('sourceBuffer updating, queueing for later'); + this.appendQueue.push([segmentIdx, chunk]); + if (this.appendQueue.length > 2){ + this.reportWarning('appendQueue length:', this.appendQueue.length); + } + return; + } + try { + this.sourceBuffer.appendBuffer(chunk); + if (segmentIdx !== null) + this.sidx.entries[segmentIdx].have = true; + this.appendRetries = 0; + } catch (e) { + if (e.name !== 'QuotaExceededError') { + throw e; + } + this.reportWarning('QuotaExceededError.'); + + // Count how many bytes are in buffer to update buffering target, + // updating .have as well for when we need to delete segments + let bytesInBuffer = 0; + for (let i = 0; i < this.sidx.entries.length; i++) { + if (this.segmentInBuffer(i)) + bytesInBuffer += this.sidx.entries[i].referencedSize; + else if (this.sidx.entries[i].have) { + this.sidx.entries[i].have = false; + this.sidx.entries[i].requested = false; + } + } + bytesInBuffer = Math.floor(4/5*bytesInBuffer); + if (bytesInBuffer < this.bufferTarget) { + this.bufferTarget = bytesInBuffer; + this.reportDebug('New buffer target:', this.bufferTarget); + } + + // Delete 10 segments (arbitrary) from buffer, making sure + // not to delete current one + let currentSegment = this.getSegmentIdx(this.video.currentTime); + let numDeleted = 0; + let i = 0; + const DELETION_TARGET = 10; + let toDelete = []; // See below for why we have to schedule it + this.reportDebug('Deleting segments from beginning of buffer.'); + while (numDeleted < DELETION_TARGET && i < currentSegment) { + if (this.sidx.entries[i].have) { + toDelete.push(i) + numDeleted++; + } + i++; + } + if (numDeleted < DELETION_TARGET) + this.reportDebug('Deleting segments from end of buffer.'); + + i = this.sidx.entries.length - 1; + while (numDeleted < DELETION_TARGET && i > currentSegment) { + if (this.sidx.entries[i].have) { + toDelete.push(i) + numDeleted++; + } + i--; + } + + // When calling .remove, the sourceBuffer will go into updating=true + // state, and remove cannot be called until it is done. So we have + // to delete on the updateend event for subsequent ones. + let removeFinishedEvent; + let deletedStuff = (toDelete.length !== 0) + let deleteSegment = () => { + if (toDelete.length === 0) { + removeFinishedEvent.remove(); + // If QuotaExceeded happened for current segment, retry the + // append + // Rescheduling will take care of updating=true problem. 
+ // Also check that we found segments to delete, to avoid + // infinite looping if we can't delete anything + if (segmentIdx === currentSegment && deletedStuff) { + this.reportDebug('Retrying appendSegment for', segmentIdx); + this.appendSegment(segmentIdx, chunk); + } else { + this.reportDebug('Not retrying segment', segmentIdx); + this.sidx.entries[segmentIdx].requested = false; + } + return; + } + let idx = toDelete.shift(); + let entry = this.sidx.entries[idx]; + let start = entry.tickStart/this.sidx.timeScale; + let end = (entry.tickEnd+1)/this.sidx.timeScale; + this.reportDebug('Deleting segment', idx); + this.sourceBuffer.remove(start, end); + entry.have = false; + entry.requested = false; + } + removeFinishedEvent = addEvent(this.sourceBuffer, 'updateend', + deleteSegment); + if (!this.sourceBuffer.updating) + deleteSegment(); + } +} +Stream.prototype.getSegmentIdx = function(videoTime) { + // get an estimate + let currentTick = videoTime * this.sidx.timeScale; + let firstSegmentDuration = this.sidx.entries[0].subSegmentDuration; + let index = 1 + Math.floor(currentTick / firstSegmentDuration); + index = clamp(index, 0, this.sidx.entries.length - 1); + + let increment = 1; + if (currentTick < this.sidx.entries[index].tickStart){ + increment = -1; + } + + // go up or down to find correct index + while (index >= 0 && index < this.sidx.entries.length) { + let entry = this.sidx.entries[index]; + if (entry.tickStart <= currentTick && (entry.tickEnd+1) > currentTick){ + return index; + } + index = index + increment; + } + this.reportError('Could not find segment index for time', videoTime); + return 0; +} +Stream.prototype.checkBuffer = async function() { + if (this.avMerge.seeking) { + return; + } + // Find the first unbuffered segment, i + let currentSegmentIdx = this.getSegmentIdx(this.video.currentTime); + let bufferedBytesAhead = 0; + let i; + for (i = currentSegmentIdx; i < this.sidx.entries.length; i++) { + let entry = this.sidx.entries[i]; + // check if we had it before, but it was deleted by the browser + if (entry.have && !this.segmentInBuffer(i)) { + this.reportDebug('segment', i, 'deleted by browser'); + entry.have = false; + entry.requested = false; + } + if (!entry.have) { + break; + } + bufferedBytesAhead += entry.referencedSize; + if (bufferedBytesAhead > this.bufferTarget) { + return; + } + } + + if (i < this.sidx.entries.length && !this.sidx.entries[i].requested) { + this.fetchSegment(i); + // We have all the segments until the end + // Signal the end of stream + } else if (i == this.sidx.entries.length) { + if (this.streamType == 'audio') + this.avMerge.audioEndOfStream(); + else + this.avMerge.videoEndOfStream(); + } +} +Stream.prototype.segmentInBuffer = function(segmentIdx) { + let entry = this.sidx.entries[segmentIdx]; + // allow for 0.01 second error + let timeStart = entry.tickStart/this.sidx.timeScale + 0.01; + + /* Some of YouTube's mp4 fragments are malformed, with half-frame + playback gaps. In this video at 240p (timeScale = 90000 ticks/second) + https://www.youtube.com/watch?v=ZhOQCwJvwlo + segment 4 (starting at 0) is claimed in the sidx table to have + a duration of 388500 ticks, but closer examination of the file using + Bento4 mp4dump shows that the segment has 129 frames at 3000 ticks + per frame, which gives an actual duration of 38700 (1500 less than + claimed). The file is 30 fps, so this error is exactly half a frame. 
+ + Note that the base_media_decode_time exactly matches the tickStart, + so the media decoder is being given a time gap of half a frame. + + The practical result of this is that sourceBuffer.buffered reports + a timeRange.end that is less than expected for that segment, resulting in + a false determination that the browser has deleted a segment. + + Segment 5 has the opposite issue, where it has a 1500 tick surplus of video + data compared to the sidx length. Segments 6 and 7 also have this + deficit-surplus pattern. + + This might have something to do with the fact that the video also + has 60 fps formats. In order to allow for adaptive streaming and seamless + quality switching, YouTube likely encodes their formats to line up nicely. + Either there is a bug in their encoder, or this is intentional. Allow for + up to 1 frame-time of error to work around this issue. */ + let endError; + if (this.streamType == 'video') + endError = 1/(this.avMerge.videoSource.fps || 30); + else + endError = 0.01 + let timeEnd = (entry.tickEnd+1)/this.sidx.timeScale - endError; + + let timeRanges = this.sourceBuffer.buffered; + for (let i=0; i < timeRanges.length; i++) { + if (timeRanges.start(i) <= timeStart && timeEnd <= timeRanges.end(i)) { + return true; + } + } + return false; +} +Stream.prototype.fetchSegment = function(segmentIdx) { + entry = this.sidx.entries[segmentIdx]; + entry.requested = true; + this.reportDebug( + 'Fetching segment', segmentIdx, ', bytes', + entry.start, entry.end, ', seconds', + entry.tickStart/this.sidx.timeScale, + (entry.tickEnd+1)/this.sidx.timeScale + ) + fetchRange( + this.url, + entry.start, + entry.end, + String(this.streamType) + ' segment ' + String(segmentIdx), + ).then(this.appendSegment.bind(this, segmentIdx)); +} +Stream.prototype.fetchSegmentIfNeeded = function(segmentIdx) { + if (segmentIdx < 0 || segmentIdx >= this.sidx.entries.length){ + return; + } + entry = this.sidx.entries[segmentIdx]; + // check if we had it before, but it was deleted by the browser + if (entry.have && !this.segmentInBuffer(segmentIdx)) { + this.reportDebug('segment', segmentIdx, 'deleted by browser'); + entry.have = false; + entry.requested = false; + } + if (entry.requested) { + return; + } + + this.fetchSegment(segmentIdx); +} +Stream.prototype.handleSeek = function() { + let segmentIdx = this.getSegmentIdx(this.video.currentTime); + this.fetchSegmentIfNeeded(segmentIdx); +} +Stream.prototype.reportDebug = function(...args) { + reportDebug(String(this.streamType) + ':', ...args); +} +Stream.prototype.reportWarning = function(...args) { + reportWarning(String(this.streamType) + ':', ...args); +} +Stream.prototype.reportError = function(...args) { + reportError(String(this.streamType) + ':', ...args); +} + + +// Utility functions + +// https://gomakethings.com/promise-based-xhr/ +// https://stackoverflow.com/a/30008115 +// http://lofi.limo/blog/retry-xmlhttprequest-carefully +function fetchRange(url, start, end, debugInfo) { + return new Promise((resolve, reject) => { + let retryCount = 0; + let xhr = new XMLHttpRequest(); + function onFailure(err, message, maxRetries=5){ + message = debugInfo + ': ' + message + ' - Err: ' + String(err); + retryCount++; + if (retryCount > maxRetries || xhr.status == 403){ + reportError('fetchRange error while fetching ' + message); + reject(message); + return; + } else { + reportWarning('Failed to fetch ' + message + + '. 
Attempting retry ' +
+                          String(retryCount) +'/' + String(maxRetries));
+        }
+
+        // Retry in 1 second, doubled for each next retry
+        setTimeout(function(){
+            xhr.open('get',url);
+            // open() clears request headers, so re-set the byte range for the retry
+            xhr.setRequestHeader('Range', 'bytes=' + start + '-' + end);
+            xhr.send();
+        }, 1000*Math.pow(2,(retryCount-1)));
+    }
+    xhr.open('get', url);
+    xhr.timeout = 15000;
+    xhr.responseType = 'arraybuffer';
+    xhr.setRequestHeader('Range', 'bytes=' + start + '-' + end);
+    xhr.onload = function (e) {
+        if (xhr.status >= 200 && xhr.status < 300) {
+            resolve(xhr.response);
+        } else {
+            onFailure(e,
+                      'Status ' +
+                      String(xhr.status) + ' ' + String(xhr.statusText)
+            );
+        }
+    };
+    xhr.onerror = function (event) {
+        onFailure(event, 'Network error');
+    };
+    xhr.ontimeout = function (event){
+        xhr.timeout += 5000;
+        onFailure(null, 'Timeout (15s)', 5);
+    };
+    xhr.send();
+    });
+}
+
+function debounce(func, wait, immediate) {
+    let timeout;
+    return function() {
+        let context = this;
+        let args = arguments;
+        let later = function() {
+            timeout = null;
+            if (!immediate) func.apply(context, args);
+        };
+        let callNow = immediate && !timeout;
+        clearTimeout(timeout);
+        timeout = setTimeout(later, wait);
+        if (callNow) func.apply(context, args);
+    };
+}
+
+function clamp(number, min, max) {
+    return Math.max(min, Math.min(number, max));
+}
+
+// allow to remove an event listener without having a function reference
+function RegisteredEvent(obj, eventName, func) {
+    this.obj = obj;
+    this.eventName = eventName;
+    this.func = func;
+    obj.addEventListener(eventName, func);
+}
+RegisteredEvent.prototype.remove = function() {
+    this.obj.removeEventListener(this.eventName, this.func);
+}
+function addEvent(obj, eventName, func) {
+    return new RegisteredEvent(obj, eventName, func);
+}
+
+function reportWarning(...args){
+    console.warn(...args);
+}
+function reportError(...args){
+    console.error(...args);
+}
+function reportDebug(...args){
+    console.debug(...args);
+}
+
+function byteArrayToIntegerLittleEndian(unsignedByteArray){
+    let result = 0;
+    for (const byte of unsignedByteArray){
+        result = result*256;
+        result += byte;
+    }
+    return result;
+}
+function byteArrayToFloat(byteArray) {
+    let view = new DataView(byteArray.buffer);
+    if (byteArray.length == 4)
+        return view.getFloat32(byteArray.byteOffset);
+    else
+        return view.getFloat64(byteArray.byteOffset);
+}
+function ByteParser(data){
+    this.curIndex = 0;
+    this.data = new Uint8Array(data);
+}
+ByteParser.prototype.readInteger = function(nBytes){
+    let result = byteArrayToIntegerLittleEndian(
+        this.data.slice(this.curIndex, this.curIndex + nBytes)
+    );
+    this.curIndex += nBytes;
+    return result;
+}
+ByteParser.prototype.readBufferBytes = function(nBytes){
+    let result = this.data.slice(this.curIndex, this.curIndex + nBytes);
+    this.curIndex += nBytes;
+    return result;
+}
+
+// BEGIN iso-bmff-parser-stream/lib/box/sidx.js (modified)
+// https://github.com/necccc/iso-bmff-parser-stream/blob/master/lib/box/sidx.js
+/* The MIT License (MIT)
+
+Copyright (c) 2014 Szabolcs Szabolcsi-Toth
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.*/ +function sidx_parse (data, offset) { + let bp = new ByteParser(data), + version = bp.readInteger(1), + flags = bp.readInteger(3), + referenceId = bp.readInteger(4), + timeScale = bp.readInteger(4), + earliestPresentationTime = bp.readInteger(version === 0 ? 4 : 8), + firstOffset = bp.readInteger(4), + __reserved = bp.readInteger(2), + entryCount = bp.readInteger(2), + entries = []; + + let totalBytesOffset = firstOffset + offset; + let totalTicks = 0; + for (let i = entryCount; i > 0; i=i-1 ) { + let referencedSize = bp.readInteger(4), + subSegmentDuration = bp.readInteger(4), + unused = bp.readBufferBytes(4) + entries.push({ + referencedSize: referencedSize, + subSegmentDuration: subSegmentDuration, + unused: unused, + start: totalBytesOffset, + end: totalBytesOffset + referencedSize - 1, // inclusive + tickStart: totalTicks, + tickEnd: totalTicks + subSegmentDuration - 1, + requested: false, + have: false, + }); + totalBytesOffset = totalBytesOffset + referencedSize; + totalTicks = totalTicks + subSegmentDuration; + } + + return { + version: version, + flags: flags, + referenceId: referenceId, + timeScale: timeScale, + earliestPresentationTime: earliestPresentationTime, + firstOffset: firstOffset, + entries: entries + }; +} +// END sidx.js + +// BEGIN iso-bmff-parser-stream/lib/unbox.js (same license), modified +function unbox(buf) { + let bp = new ByteParser(buf), + bufferLength = buf.length, + length, + typeData, + boxData + + length = bp.readInteger(4); // length of entire box, + typeData = bp.readInteger(4); + + if (bufferLength - length < 0) { + reportWarning('Warning: sidx table is cut off'); + return { + currentLength: bufferLength, + length: length, + type: typeData, + data: bp.readBufferBytes(bufferLength) + }; + } + + boxData = bp.readBufferBytes(length - 8); + + return { + length: length, + type: typeData, + data: boxData + }; +} +// END unbox.js + + +function extractWebmInitializationInfo(initializationSegment) { + let result = { + timeScale: null, + cuesOffset: null, + duration: null, + }; + (new EbmlDecoder()).readTags(initializationSegment, (tagType, tag) => { + if (tag.name == 'TimecodeScale') + result.timeScale = byteArrayToIntegerLittleEndian(tag.data); + else if (tag.name == 'Duration') + // Integer represented as a float (why??); units of TimecodeScale + result.duration = byteArrayToFloat(tag.data); + // https://lists.matroska.org/pipermail/matroska-devel/2013-July/004549.html + // "CueClusterPosition in turn is relative to the segment's data start + // position" (the data start is the position after the bytes + // used to represent the tag ID and entry size) + else if (tagType == 'start' && tag.name == 'Segment') + result.cuesOffset = tag.dataStart; + }); + if (result.timeScale === null) { + result.timeScale = 1000000; + } + + // webm timecodeScale is the number of nanoseconds in a tick + // Convert it to number of ticks per second to match mp4 convention + result.timeScale = 10**9/result.timeScale; + return result; +} +function parseWebmCues(indexSegment, initInfo) { + let entries = []; + let 
currentEntry = {}; + let cuesOffset = initInfo.cuesOffset; + (new EbmlDecoder()).readTags(indexSegment, (tagType, tag) => { + if (tag.name == 'CueTime') { + const tickStart = byteArrayToIntegerLittleEndian(tag.data); + currentEntry.tickStart = tickStart; + if (entries.length !== 0) + entries[entries.length - 1].tickEnd = tickStart - 1; + } else if (tag.name == 'CueClusterPosition') { + const byteStart = byteArrayToIntegerLittleEndian(tag.data); + currentEntry.start = cuesOffset + byteStart; + if (entries.length !== 0) + entries[entries.length - 1].end = cuesOffset + byteStart - 1; + } else if (tagType == 'end' && tag.name == 'CuePoint') { + entries.push(currentEntry); + currentEntry = {}; + } + }); + if (initInfo.duration) + entries[entries.length - 1].tickEnd = initInfo.duration - 1; + return entries; +} + +// BEGIN node-ebml (modified) for parsing WEBM cues table +// https://github.com/node-ebml/node-ebml + +/* Copyright (c) 2013-2018 Mark Schmale and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.*/ + +const schema = new Map([ + [0x18538067, ['Segment', 'm']], + [0x1c53bb6b, ['Cues', 'm']], + [0xbb, ['CuePoint', 'm']], + [0xb3, ['CueTime', 'u']], + [0xb7, ['CueTrackPositions', 'm']], + [0xf7, ['CueTrack', 'u']], + [0xf1, ['CueClusterPosition', 'u']], + [0x1549a966, ['Info', 'm']], + [0x2ad7b1, ['TimecodeScale', 'u']], + [0x4489, ['Duration', 'f']], +]); + + +function EbmlDecoder() { + this.buffer = null; + this.emit = null; + this.tagStack = []; + this.cursor = 0; +} +EbmlDecoder.prototype.readTags = function(chunk, onParsedTag) { + this.buffer = new Uint8Array(chunk); + this.emit = onParsedTag; + + while (this.cursor < this.buffer.length) { + if (!this.readTag() || !this.readSize() || !this.readContent()) { + break; + } + } +} +EbmlDecoder.prototype.getSchemaInfo = function(tag) { + if (Number.isInteger(tag) && schema.has(tag)) { + let name, type; + [name, type] = schema.get(tag); + return {name, type}; + } + return { + type: null, + name: 'unknown', + }; +} +EbmlDecoder.prototype.readTag = function() { + if (this.cursor >= this.buffer.length) { + return false; + } + + const tag = readVint(this.buffer, this.cursor); + if (tag == null) { + return false; + } + + const tagObj = { + tag: tag.value, + ...this.getSchemaInfo(tag.valueWithLeading1), + start: this.cursor, + end: this.cursor + tag.length, // exclusive; also overwritten below + }; + this.tagStack.push(tagObj); + + this.cursor += tag.length; + return true; +} +EbmlDecoder.prototype.readSize = function() { + const tagObj = 
this.tagStack[this.tagStack.length - 1]; + + if (this.cursor >= this.buffer.length) { + return false; + } + + const size = readVint(this.buffer, this.cursor); + if (size == null) { + return false; + } + + tagObj.dataSize = size.value; + + // unknown size + if (size.value === -1) { + tagObj.end = -1; + } else { + tagObj.end += size.value + size.length; + } + + this.cursor += size.length; + tagObj.dataStart = this.cursor; + return true; +} +EbmlDecoder.prototype.readContent = function() { + const { type, dataSize, ...rest } = this.tagStack[ + this.tagStack.length - 1 + ]; + + if (type === 'm') { + this.emit('start', { type, dataSize, ...rest }); + return true; + } + + if (this.buffer.length < this.cursor + dataSize) { + return false; + } + + const data = this.buffer.subarray(this.cursor, this.cursor + dataSize); + this.cursor += dataSize; + + this.tagStack.pop(); // remove the object from the stack + + this.emit('tag', { type, dataSize, data, ...rest }); + + while (this.tagStack.length > 0) { + const topEle = this.tagStack[this.tagStack.length - 1]; + if (this.cursor < topEle.end) { + break; + } + this.emit('end', topEle); + this.tagStack.pop(); + } + return true; +} + + +// user234683 notes: The matroska variable integer format is as follows: +// The first byte is where the length of the integer in bytes is determined. +// The number of bytes for the integer is equal to the number of leading +// zeroes in that first byte PLUS 1. Then there is a single 1 bit separator, +// and the rest of the bits in the first byte and the rest of the bits in +// the subsequent bytes are the value of the number. Note the 1-bit separator +// is not part of the value, but by convention IS included in the value for the +// EBML Tag IDs in the schema table above +// The byte-length includes the first byte. So one could also say the number +// of leading zeros is the number of subsequent bytes to include. +function readVint(buffer, start = 0) { + const length = 8 - Math.floor(Math.log2(buffer[start])); + + if (start + length > buffer.length) { + return null; + } + + let value = buffer[start] & ((1 << (8 - length)) - 1); + let valueWithLeading1 = buffer[start] & ((1 << (8 - length + 1)) - 1); + for (let i = 1; i < length; i += 1) { + // user234683 notes: Bails out with -1 (unknown) if the value would + // exceed 53 bits, which is the limit since JavaScript stores all + // numbers as floating points. 
See + // https://github.com/node-ebml/node-ebml/issues/49 + if (i === 7) { + if (value >= 2 ** 8 && buffer[start + 7] > 0) { + return { length, value: -1, valueWithLeading1: -1 }; + } + } + value *= 2 ** 8; + value += buffer[start + i]; + valueWithLeading1 *= 2 ** 8; + valueWithLeading1 += buffer[start + i]; + } + + return { length, value, valueWithLeading1 }; +} +// END node-ebml diff --git a/youtube/static/js/comments.js b/youtube/static/js/comments.js new file mode 100644 index 0000000..14ba0c0 --- /dev/null +++ b/youtube/static/js/comments.js @@ -0,0 +1,20 @@ +function onClickReplies(e) { + let details = e.target.parentElement; + // e.preventDefault(); + console.log("loading replies .."); + doXhr(details.getAttribute("data-src") + "&slim=1", (html) => { + let div = details.querySelector(".comment_page"); + div.innerHTML = html; + }); + details.removeEventListener('click', onClickReplies); +} + +window.addEventListener('DOMContentLoaded', function() { + QA("details.replies").forEach(details => { + details.addEventListener('click', onClickReplies); + details.addEventListener('auxclick', (e) => { + if (e.target.parentElement !== details) return; + if (e.button == 1) window.open(details.getAttribute("data-src")); + }); + }); +}); diff --git a/youtube/static/js/common.js b/youtube/static/js/common.js new file mode 100644 index 0000000..599d578 --- /dev/null +++ b/youtube/static/js/common.js @@ -0,0 +1,116 @@ +const Q = document.querySelector.bind(document); +const QA = document.querySelectorAll.bind(document); +const QId = document.getElementById.bind(document); +let seconds, + minutes, + hours; +function text(msg) { return document.createTextNode(msg); } +function clearNode(node) { while (node.firstChild) node.removeChild(node.firstChild); } +function toTimestamp(seconds) { + seconds = Math.floor(seconds); + + minutes = Math.floor(seconds/60); + seconds = seconds % 60; + + hours = Math.floor(minutes/60); + minutes = minutes % 60; + + if (hours) { + return `0${hours}:`.slice(-3) + `0${minutes}:`.slice(-3) + `0${seconds}`.slice(-2); + } + return `0${minutes}:`.slice(-3) + `0${seconds}`.slice(-2); +} + +let cur_track_idx = 0; +function getActiveTranscriptTrackIdx() { + let textTracks = QId("js-video-player").textTracks; + if (!textTracks.length) return; + for (let i=0; i < textTracks.length; i++) { + if (textTracks[i].mode == "showing") { + cur_track_idx = i; + return cur_track_idx; + } + } + return cur_track_idx; +} +function getActiveTranscriptTrack() { return QId("js-video-player").textTracks[getActiveTranscriptTrackIdx()]; } + +function getDefaultTranscriptTrackIdx() { + let textTracks = QId("js-video-player").textTracks; + return textTracks.length - 1; +} + +function doXhr(url, callback=null) { + let xhr = new XMLHttpRequest(); + xhr.open("GET", url); + xhr.onload = (e) => { + callback(e.currentTarget.response); + } + xhr.send(); + return xhr; +} + +// https://stackoverflow.com/a/30810322 +function copyTextToClipboard(text) { + let textArea = document.createElement("textarea"); + + // + // *** This styling is an extra step which is likely not required. *** + // + // Why is it here? To ensure: + // 1. the element is able to have focus and selection. + // 2. if element was to flash render it has minimal visual impact. + // 3. less flakyness with selection and copying which **might** occur if + // the textarea element is not visible. + // + // The likelihood is the element won't even render, not even a + // flash, so some of these are just precautions. 
However in + // Internet Explorer the element is visible whilst the popup + // box asking the user for permission for the web page to + // copy to the clipboard. + // + + // Place in top-left corner of screen regardless of scroll position. + textArea.style.position = 'fixed'; + textArea.style.top = 0; + textArea.style.left = 0; + + // Ensure it has a small width and height. Setting to 1px / 1em + // doesn't work as this gives a negative w/h on some browsers. + textArea.style.width = '2em'; + textArea.style.height = '2em'; + + // We don't need padding, reducing the size if it does flash render. + textArea.style.padding = 0; + + // Clean up any borders. + textArea.style.border = 'none'; + textArea.style.outline = 'none'; + textArea.style.boxShadow = 'none'; + + // Avoid flash of white box if rendered for any reason. + textArea.style.background = 'transparent'; + + + textArea.value = text; + + let parent_el = video.parentElement; + parent_el.appendChild(textArea); + textArea.focus(); + textArea.select(); + + try { + let successful = document.execCommand('copy'); + let msg = successful ? 'successful' : 'unsuccessful'; + console.log('Copying text command was ' + msg); + } catch (err) { + console.log('Oops, unable to copy'); + } + + parent_el.removeChild(textArea); +} + + +window.addEventListener('DOMContentLoaded', function() { + cur_track_idx = getDefaultTranscriptTrackIdx(); +}); diff --git a/youtube/static/js/hotkeys.js b/youtube/static/js/hotkeys.js new file mode 100644 index 0000000..b71972e --- /dev/null +++ b/youtube/static/js/hotkeys.js @@ -0,0 +1,61 @@ +function onKeyDown(e) { + if (['INPUT', 'TEXTAREA'].includes(document.activeElement.tagName)) return false; + + // console.log(e); + let v = QId("js-video-player"); + if (!e.isTrusted) return; // plyr CustomEvent + let c = e.key.toLowerCase(); + if (e.ctrlKey) return; + else if (c == "k") { + v.paused ? v.play() : v.pause(); + } + else if (c == "arrowleft") { + e.preventDefault(); + v.currentTime = v.currentTime - 5; + } + else if (c == "arrowright") { + e.preventDefault(); + v.currentTime = v.currentTime + 5; + } + else if (c == "j") { + e.preventDefault(); + v.currentTime = v.currentTime - 10; + } + else if (c == "l") { + e.preventDefault(); + v.currentTime = v.currentTime + 10; + } + else if (c == "f") { + e.preventDefault(); + if (data.settings.use_video_player == 2) { + player.fullscreen.toggle() + } + else { + if (document.fullscreen) { + document.exitFullscreen() + } + else { + v.requestFullscreen() + } + } + } + else if (c == "m") { + if (v.muted == false) {v.muted = true;} + else {v.muted = false;} + } + else if (c == "c") { + e.preventDefault(); + let tt = getActiveTranscriptTrack(); + if (tt == null) return; + if (tt.mode == "showing") tt.mode = "disabled"; + else tt.mode = "showing"; + } + else if (c == "t") { + let ts = Math.floor(QId("js-video-player").currentTime); + copyTextToClipboard(`https://youtu.be/${data.video_id}?t=${ts}`); + } +} + +window.addEventListener('DOMContentLoaded', function() { + document.addEventListener('keydown', onKeyDown); +}); diff --git a/youtube/static/js/playlistadd.js b/youtube/static/js/playlistadd.js new file mode 100644 index 0000000..4b76ce1 --- /dev/null +++ b/youtube/static/js/playlistadd.js @@ -0,0 +1,86 @@ +(function main() { + /* Takes control of the form if javascript is enabled, so that adding stuff to a playlist will not cause things to stop loading, and will display a status message. 
If javascript is disabled, the form will still work using regular HTML methods, but causes things on the page (such as the video) to stop loading. */ + const playlistAddForm = document.getElementById('playlist-edit'); + + function setStyle(element, property, value){ + element.style[property] = value; + } + function removeMessage(messageBox){ + messageBox.parentNode.removeChild(messageBox); + } + + function displayMessage(text, error=false){ + let currentMessageBox = document.getElementById('message-box'); + if(currentMessageBox !== null){ + currentMessageBox.parentNode.removeChild(currentMessageBox); + } + let messageBox = document.createElement('div'); + if(error){ + messageBox.setAttribute('role', 'alert'); + } else { + messageBox.setAttribute('role', 'status'); + } + messageBox.setAttribute('id', 'message-box'); + let textNode = document.createTextNode(text); + messageBox.appendChild(textNode); + document.querySelector('main').appendChild(messageBox); + let currentstyle = window.getComputedStyle(messageBox); + let removalDelay; + if(error){ + removalDelay = 5000; + } else { + removalDelay = 1500; + } + window.setTimeout(setStyle, 20, messageBox, 'opacity', 1); + window.setTimeout(setStyle, removalDelay, messageBox, 'opacity', 0); + window.setTimeout(removeMessage, removalDelay+300, messageBox); + } + // https://developer.mozilla.org/en-US/docs/Learn/HTML/Forms/Sending_forms_through_JavaScript + function sendData(event){ + let clicked_button = document.activeElement; + if(clicked_button === null || clicked_button.getAttribute('type') !== 'submit' || clicked_button.parentElement != event.target){ + console.log('ERROR: clicked_button not valid'); + return; + } + if(clicked_button.getAttribute('value') !== 'add'){ + return; // video(s) are being removed from playlist, just let it refresh the page + } + event.preventDefault(); + let XHR = new XMLHttpRequest(); + let FD = new FormData(playlistAddForm); + + if(FD.getAll('video_info_list').length === 0){ + displayMessage('Error: No videos selected', true); + return; + } + + if(FD.get('playlist_name') === ""){ + displayMessage('Error: No playlist selected', true); + return; + } + + // https://stackoverflow.com/questions/48322876/formdata-doesnt-include-value-of-buttons + FD.append('action', 'add'); + + XHR.addEventListener('load', function(event){ + if(event.target.status == 204){ + displayMessage('Added videos to playlist "' + FD.get('playlist_name') + '"'); + } else { + displayMessage('Error adding videos to playlist: ' + event.target.status.toString(), true); + } + }); + + XHR.addEventListener('error', function(event){ + if(event.target.status == 0){ + displayMessage('XHR failed: Check that XHR requests are allowed', true); + } else { + displayMessage('XHR failed: Unknown error', true); + } + }); + + XHR.open('POST', playlistAddForm.getAttribute('action')); + XHR.send(FD); + } + + playlistAddForm.addEventListener('submit', sendData); +}()); diff --git a/youtube/static/js/plyr-start.js b/youtube/static/js/plyr-start.js new file mode 100644 index 0000000..56068f0 --- /dev/null +++ b/youtube/static/js/plyr-start.js @@ -0,0 +1,121 @@ +(function main() { + 'use strict'; + + // Captions + let captionsActive = false; + if (data.settings.subtitles_mode === 2 || (data.settings.subtitles_mode === 1 && data.has_manual_captions)) { + captionsActive = true; + } + + // AutoPlay + let autoplayActive = data.settings.autoplay_videos || false; + + let qualityOptions = []; + let qualityDefault; + + for (let src of data.uni_sources) { + 
        qualityOptions.push(src.quality_string);
+    }
+
+    for (let src of data.pair_sources) {
+        qualityOptions.push(src.quality_string);
+    }
+
+    if (data.using_pair_sources) {
+        qualityDefault = data.pair_sources[data.pair_idx].quality_string;
+    } else if (data.uni_sources.length !== 0) {
+        qualityDefault = data.uni_sources[data.uni_idx].quality_string;
+    } else {
+        qualityDefault = 'None';
+    }
+
+    // Fix plyr refusing to work with qualities that are strings
+    Object.defineProperty(Plyr.prototype, 'quality', {
+        set: function (input) {
+            const config = this.config.quality;
+            const options = this.options.quality;
+            let quality = input;
+            let updateStorage = true;
+
+            if (!options.length) {
+                return;
+            }
+
+            if (!options.includes(quality)) {
+                return;
+            }
+
+            // Update config
+            config.selected = quality;
+
+            // Set quality
+            this.media.quality = quality;
+
+            // Save to storage
+            if (updateStorage) {
+                this.storage.set({ quality });
+            }
+        },
+    });
+
+    const player = new Plyr(document.getElementById('js-video-player'), {
+        // Learning about autoplay permission https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Permissions-Policy/autoplay#syntax
+        autoplay: autoplayActive,
+        disableContextMenu: false,
+        captions: {
+            active: captionsActive,
+            language: data.settings.subtitles_language,
+        },
+        controls: [
+            'play-large',
+            'play',
+            'progress',
+            'current-time',
+            'duration',
+            'mute',
+            'volume',
+            'captions',
+            'settings',
+            'pip',
+            'airplay',
+            'fullscreen',
+        ],
+        iconUrl: '/youtube.com/static/modules/plyr/plyr.svg',
+        blankVideo: '/youtube.com/static/modules/plyr/blank.webm',
+        debug: false,
+        storage: { enabled: false },
+        quality: {
+            default: qualityDefault,
+            options: qualityOptions,
+            forced: true,
+            onChange: function (quality) {
+                if (quality == 'None') {
+                    return;
+                }
+                if (quality.includes('(integrated)')) {
+                    for (let i = 0; i < data.uni_sources.length; i++) {
+                        if (data.uni_sources[i].quality_string == quality) {
+                            changeQuality({ type: 'uni', index: i });
+                            return;
+                        }
+                    }
+                } else {
+                    for (let i = 0; i < data.pair_sources.length; i++) {
+                        if (data.pair_sources[i].quality_string == quality) {
+                            changeQuality({ type: 'pair', index: i });
+                            return;
+                        }
+                    }
+                }
+            },
+        },
+        previewThumbnails: {
+            enabled: storyboard_url !== null,
+            src: [storyboard_url],
+        },
+        settings: ['captions', 'quality', 'speed', 'loop'],
+        tooltips: {
+            controls: true,
+        },
+    });
+})();
diff --git a/youtube/static/js/sponsorblock.js b/youtube/static/js/sponsorblock.js
new file mode 100644
index 0000000..a929fc5
--- /dev/null
+++ b/youtube/static/js/sponsorblock.js
@@ -0,0 +1,40 @@
+"use strict";
+
+// from: https://git.gir.st/subscriptionfeed.git/blob/59a590d:/app/youtube/templates/watch.html.j2#l28
+
+let sha256=function a(b){function c(a,b){return a>>>b|a<<32-b}for(var d,e,f=Math.pow,g=f(2,32),h="length",i="",j=[],k=8*b[h],l=a.h=a.h||[],m=a.k=a.k||[],n=m[h],o={},p=2;64>n;p++)if(!o[p]){for(d=0;313>d;d+=p)o[d]=p;l[n]=f(p,.5)*g|0,m[n++]=f(p,1/3)*g|0}for(b+="\x80";b[h]%64-56;)b+="\x00";for(d=0;d<b[h];d++){if((e=b.charCodeAt(d))>>8)return;j[d>>2]|=e<<(3-d)%4*8}for(j[j[h]]=k/g|0,j[j[h]]=k,e=0;e<j[h];){var q=j.slice(e,e+=16),r=l;for(l=l.slice(0,8),d=0;64>d;d++){var s=q[d-15],t=q[d-2],u=l[0],v=l[4],w=l[7]+(c(v,6)^c(v,11)^c(v,25))+(v&l[5]^~v&l[6])+m[d]+(q[d]=16>d?q[d]:q[d-16]+(c(s,7)^c(s,18)^s>>>3)+q[d-7]+(c(t,17)^c(t,19)^t>>>10)|0),x=(c(u,2)^c(u,13)^c(u,22))+(u&l[1]^u&l[2]^l[1]&l[2]);l=[w+x|0].concat(l),l[4]=l[4]+w|0}for(d=0;8>d;d++)l[d]=l[d]+r[d]|0}for(d=0;8>d;d++)for(e=3;e+1;e--){var y=l[d]>>8*e&255;i+=(16>y?0:"")+y.toString(16)}return i}; /*https://geraintluff.github.io/sha256/sha256.min.js (public domain)*/
+
+window.addEventListener("load", load_sponsorblock);
+document.addEventListener('DOMContentLoaded', ()=>{
+    const check = document.querySelector("#skip_sponsors");
+    check.addEventListener("change", () => {if (check.checked) load_sponsorblock()});
+});
+function load_sponsorblock(){
+    const info_elem = Q('#skip_n');
+    if (info_elem.innerText.length) return; // already fetched
+    const hash = sha256(data.video_id).substr(0,4);
+    const video_obj = QId("js-video-player");
+    let url = `/https://sponsor.ajay.app/api/skipSegments/${hash}`;
+    fetch(url)
+        .then(response => response.json())
+        .then(r => {
+            for (const video of r) {
+                if (video.videoID != data.video_id) continue;
+                info_elem.innerText = `(${video.segments.length} segments)`;
+                const cat_n = video.segments.map(e=>e.category).sort()
+                    .reduce((acc,e) => (acc[e]=(acc[e]||0)+1, acc), {});
+                info_elem.title = Object.entries(cat_n).map(e=>e.join(': ')).join(', ');
+                for (const segment of video.segments) {
+                    const [start, stop] = segment.segment;
+                    if (segment.category != "sponsor") continue;
+                    video_obj.addEventListener("timeupdate", function() {
+                        if (Q("#skip_sponsors").checked &&
+                                this.currentTime >= start &&
+                                this.currentTime < stop-1) {
+                            this.currentTime = stop;
+                        }
+                    });
+                }
+            }
+        });
+}
diff --git a/youtube/static/js/transcript-table.js b/youtube/static/js/transcript-table.js
new file mode 100644
index 0000000..5cee97e
--- /dev/null
+++ b/youtube/static/js/transcript-table.js
@@ -0,0 +1,151 @@
+let details_tt, select_tt, table_tt;
+
+function renderCues() {
+    const selectedTrack = QId("js-video-player").textTracks[select_tt.selectedIndex];
+    const cuesList = [...selectedTrack.cues];
+    let is_automatic = cuesList[0].text.startsWith(" \n");
+
+    // Firefox ignores cues starting with a blank line containing a space
+    // Automatic captions contain such a blank line in the first cue
+    let ff_bug = false;
+    if (!cuesList[0].text.length) { ff_bug = true; is_automatic = true; }
+    let rows;
+
+    function forEachCue(callback) {
+        for (let i=0; i < cuesList.length; i++) {
+            let txt, startTime = selectedTrack.cues[i].startTime;
+            if (is_automatic) {
+                // Automatic captions repeat content. The new segment is displayed
+                // on the bottom row; the old one is displayed on the top row.
+                // So grab the bottom row only. Skip every other cue because the bottom
+                // row is empty.
+ if (i % 2) continue; + if (ff_bug && !selectedTrack.cues[i].text.length) { + txt = selectedTrack.cues[i+1].text; + } else { + txt = selectedTrack.cues[i].text.split('\n')[1].replace(/<[\d:.]*?>(.*?)<\/c>/g, "$1"); + } + } else { + txt = selectedTrack.cues[i].text; + } + callback(startTime, txt); + } + } + + function createTimestampLink(startTime, txt, title=null) { + a = document.createElement("a"); + a.appendChild(text(txt)); + a.href = "javascript:;"; // TODO: replace this with ?t parameter + if (title) a.title = title; + a.addEventListener("click", (e) => { + QId("js-video-player").currentTime = startTime; + }) + return a; + } + + clearNode(table_tt); + console.log("render cues..", selectedTrack.cues.length); + if (Q("input#transcript-use-table").checked) { + forEachCue((startTime, txt) => { + let tr, td, a; + tr = document.createElement("tr"); + + td = document.createElement("td") + td.appendChild(createTimestampLink(startTime, toTimestamp(startTime))); + tr.appendChild(td); + + td = document.createElement("td") + td.appendChild(text(txt)); + tr.appendChild(td); + + table_tt.appendChild(tr); + }); + rows = table_tt.rows; + } + else { + forEachCue((startTime, txt) => { + span = document.createElement("span"); + let idx = txt.indexOf(" ", 1); + let [firstWord, rest] = [txt.slice(0, idx), txt.slice(idx)]; + + span.appendChild(createTimestampLink(startTime, firstWord, toTimestamp(startTime))); + if (rest) span.appendChild(text(rest + " ")); + table_tt.appendChild(span); + }); + rows = table_tt.childNodes; + } + + let lastActiveRow = null; + let row; + function colorCurRow(e) { + // console.log("cuechange:", e); + let activeCueIdx = cuesList.findIndex((c) => c == selectedTrack.activeCues[0]); + let activeRowIdx = is_automatic ? Math.floor(activeCueIdx / 2) : activeCueIdx; + + if (lastActiveRow) lastActiveRow.style.backgroundColor = ""; + if (activeRowIdx < 0) return; + row = rows[activeRowIdx]; + row.style.backgroundColor = "#0cc12e42"; + lastActiveRow = row; + } + selectedTrack.addEventListener("cuechange", colorCurRow); +} + +function loadCues() { + const textTracks = QId("js-video-player").textTracks; + const selectedTrack = textTracks[select_tt.selectedIndex]; + + // See https://developer.mozilla.org/en-US/docs/Web/API/TextTrack/mode + // This code will (I think) make sure that the selected track's cues + // are loaded even if the track subtitles aren't on (showing). Setting it + // to hidden will load them. 
+ let selected_track_target_mode = "hidden"; + + for (let track of textTracks) { + // Want to avoid unshowing selected track if it's showing + if (track.mode === "showing") selected_track_target_mode = "showing"; + + if (track !== selectedTrack) track.mode = "disabled"; + } + if (selectedTrack.mode == "disabled") { + selectedTrack.mode = selected_track_target_mode; + } + + let intervalID = setInterval(() => { + if (selectedTrack.cues && selectedTrack.cues.length) { + clearInterval(intervalID); + renderCues(); + } + }, 100); +} + +window.addEventListener('DOMContentLoaded', function() { + const textTracks = QId("js-video-player").textTracks; + if (!textTracks.length) return; + + details_tt = Q("details#transcript-details"); + details_tt.addEventListener("toggle", () => { + if (details_tt.open) loadCues(); + }); + + select_tt = Q("select#select-tt"); + select_tt.selectedIndex = getDefaultTranscriptTrackIdx(); + select_tt.addEventListener("change", loadCues); + + table_tt = Q("table#transcript-table"); + table_tt.appendChild(text("loading...")); + + textTracks.addEventListener("change", (e) => { + // console.log(e); + let idx = getActiveTranscriptTrackIdx(); // sadly not provided by 'e' + if (textTracks[idx].mode == "showing") { + select_tt.selectedIndex = idx; + loadCues(); + } + else if (details_tt.open && textTracks[idx].mode == "disabled") { + textTracks[idx].mode = "hidden"; // so we still receive 'oncuechange' + } + }) + + Q("input#transcript-use-table").addEventListener("change", renderCues); +}); diff --git a/youtube/static/js/watch.js b/youtube/static/js/watch.js new file mode 100644 index 0000000..95d9fa7 --- /dev/null +++ b/youtube/static/js/watch.js @@ -0,0 +1,199 @@ +const video = document.getElementById('js-video-player'); + +function changeQuality(selection) { + let currentVideoTime = video.currentTime; + let videoPaused = video.paused; + let videoSpeed = video.playbackRate; + let srcInfo; + if (avMerge) + avMerge.close(); + if (selection.type == 'uni'){ + srcInfo = data['uni_sources'][selection.index]; + video.src = srcInfo.url; + } else { + srcInfo = data['pair_sources'][selection.index]; + avMerge = new AVMerge(video, srcInfo, currentVideoTime); + } + video.currentTime = currentVideoTime; + if (!videoPaused){ + video.play(); + } + video.playbackRate = videoSpeed; +} + +// Initialize av-merge +let avMerge; +if (data.using_pair_sources) { + let srcPair = data['pair_sources'][data['pair_idx']]; + // Do it dynamically rather than as the default in jinja + // in case javascript is disabled + avMerge = new AVMerge(video, srcPair, 0); +} + +// Quality selector +const qs = document.getElementById('quality-select'); +if (qs) { + qs.addEventListener('change', function(e) { + changeQuality(JSON.parse(this.value)) + }); +} + +// Set up video start time from &t parameter +if (data.time_start != 0 && video) {video.currentTime = data.time_start}; + +// External video speed control +let speedInput = document.getElementById('speed-control'); +speedInput.addEventListener('keyup', (event) => { + if (event.key === 'Enter') { + let speed = parseFloat(speedInput.value); + if(!isNaN(speed)){ + video.playbackRate = speed; + } + } +}); + + +// Playlist lazy image loading +if (data.playlist && data.playlist['id'] !== null) { + // lazy load playlist images + // copied almost verbatim from + // https://css-tricks.com/tips-for-rolling-your-own-lazy-loading/ + // IntersectionObserver isn't supported in pre-quantum + // firefox versions, but the alternative of making it + // manually is a performance 
drain, so oh well + let observer = new IntersectionObserver(lazyLoad, { + + // where in relation to the edge of the viewport, we are observing + rootMargin: "100px", + + // how much of the element needs to have intersected + // in order to fire our loading function + threshold: 1.0 + + }); + + function lazyLoad(elements) { + elements.forEach(item => { + if (item.intersectionRatio > 0) { + + // set the src attribute to trigger a load + item.target.src = item.target.dataset.src; + + // stop observing this element. Our work here is done! + observer.unobserve(item.target); + }; + }); + }; + + // Tell our observer to observe all img elements with a "lazy" class + let lazyImages = document.querySelectorAll('img.lazy'); + lazyImages.forEach(img => { + observer.observe(img); + }); +} + + +// Autoplay +if (data.settings.related_videos_mode !== 0 || data.playlist !== null) { + let playability_error = !!data.playability_error; + let isPlaylist = false; + if (data.playlist !== null && data.playlist['current_index'] !== null) + isPlaylist = true; + + // read cookies on whether to autoplay + // https://developer.mozilla.org/en-US/docs/Web/API/Document/cookie + let cookieValue; + let playlist_id; + if (isPlaylist) { + // from https://stackoverflow.com/a/6969486 + function escapeRegExp(string) { + // $& means the whole matched string + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + } + playlist_id = data.playlist['id']; + playlist_id = escapeRegExp(playlist_id); + + cookieValue = document.cookie.replace(new RegExp( + '(?:(?:^|.*;\\s*)autoplay_' + + playlist_id + '\\s*\\=\\s*([^;]*).*$)|^.*$' + ), '$1'); + } else { + cookieValue = document.cookie.replace(new RegExp( + '(?:(?:^|.*;\\s*)autoplay\\s*\\=\\s*([^;]*).*$)|^.*$' + ),'$1'); + } + + let autoplayEnabled = 0; + if(cookieValue.length === 0){ + autoplayEnabled = 0; + } else { + autoplayEnabled = Number(cookieValue); + } + + // check the checkbox if autoplay is on + let checkbox = document.querySelector('.autoplay-toggle'); + if(autoplayEnabled){ + checkbox.checked = true; + } + + // listen for checkbox to turn autoplay on and off + let cookie = 'autoplay' + if (isPlaylist) + cookie += '_' + playlist_id; + + checkbox.addEventListener( 'change', function() { + if(this.checked) { + autoplayEnabled = 1; + document.cookie = cookie + '=1; SameSite=Strict'; + } else { + autoplayEnabled = 0; + document.cookie = cookie + '=0; SameSite=Strict'; + } + }); + + if(!playability_error){ + // play the video if autoplay is on + if(autoplayEnabled){ + video.play(); + } + } + + // determine next video url + let nextVideoUrl; + if (isPlaylist) { + let currentIndex = data.playlist['current_index']; + if (data.playlist['current_index']+1 == data.playlist['items'].length) + nextVideoUrl = null; + else + nextVideoUrl = data.playlist['items'][data.playlist['current_index']+1]['url']; + + // scroll playlist to proper position + // item height + gap == 100 + let pl = document.querySelector('.playlist-videos'); + pl.scrollTop = 100*currentIndex; + } else { + if (data.related.length === 0) + nextVideoUrl = null; + else + nextVideoUrl = data.related[0]['url']; + } + let nextVideoDelay = 1000; + + // go to next video when video ends + // https://stackoverflow.com/a/2880950 + if (nextVideoUrl) { + if(playability_error){ + videoEnded(); + } else { + video.addEventListener('ended', videoEnded, false); + } + function nextVideo(){ + if(autoplayEnabled){ + window.location.href = nextVideoUrl; + } + } + function videoEnded(e) { + window.setTimeout(nextVideo, nextVideoDelay); + } + 
} +} -- cgit v1.2.3-59-g8ed1b
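The scripts in this patch read two globals that the server-side templates are
expected to define, storyboard_url and data. Neither appears in this diff, so
the sketch below only reconstructs the fields the code above actually
dereferences (data.settings.*, data.uni_sources, data.pair_sources,
data.playlist, data.related, and so on). Every value shown is a placeholder and
the exact key set is an assumption inferred from usage, not taken from the
templates themselves.

    // Illustrative only: the real object is emitted by the backend templates.
    const storyboard_url = '/youtube.com/...';   // or null when no storyboard exists
    const data = {
        video_id: 'dQw4w9WgXcQ',
        time_start: 0,                 // seconds, from the &t= parameter
        playability_error: null,
        has_manual_captions: false,
        settings: {
            use_video_player: 2,       // 2 selects the plyr branch in hotkeys.js
            autoplay_videos: false,
            subtitles_mode: 1,
            subtitles_language: 'en',
            related_videos_mode: 1,
        },
        // Single-file (audio+video) formats, played directly via video.src
        uni_sources: [
            {url: '...', quality_string: '360p (integrated)'},
        ],
        uni_idx: 0,
        // Separate audio/video pairs, merged client-side by AVMerge; each entry
        // carries candidate lists that AVMerge probes with
        // MediaSource.isTypeSupported(). The per-format fields read by the
        // (truncated) Stream constructor, such as file size and init/index byte
        // ranges, are not fully visible in this diff and are omitted here.
        pair_sources: [
            {
                quality_string: '720p',
                videos: [{url: '...', ext: 'mp4', itag: '136', bitrate: 1500000, fps: 30,
                          mime_codec: 'video/mp4; codecs="avc1.4d401f"',
                          quality_string: '720p'}],
                audios: [{url: '...', ext: 'm4a', itag: '140', bitrate: 128000,
                          mime_codec: 'audio/mp4; codecs="mp4a.40.2"',
                          quality_string: 'audio only'}],
            },
        ],
        pair_idx: 0,
        using_pair_sources: true,
        playlist: null,                // or {id, current_index, items: [{url: '...'}, ...]}
        related: [{url: '/youtube.com/watch?v=...'}],
    };

Anything not read by watch.js, plyr-start.js, hotkeys.js, sponsorblock.js or
av-merge.js is left out rather than guessed.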