convert plugins

JellyBrick
2023-11-27 18:41:50 +09:00
parent 4fad456619
commit 3ffbfbe0e3
70 changed files with 1617 additions and 1836 deletions

@@ -1,140 +1,139 @@
-import builder, { type SkipSilencesPluginConfig } from './index';
+import { RendererContext } from '@/types/contexts';
+import type { SkipSilencesPluginConfig } from './index';

-export default builder.createRenderer(({ getConfig }) => {
-  let config: SkipSilencesPluginConfig;
+let config: SkipSilencesPluginConfig;

let isSilent = false;
let hasAudioStarted = false;

const smoothing = 0.1;
const threshold = -100; // DB (-100 = absolute silence, 0 = loudest)
const interval = 2; // Ms
const history = 10;
const speakingHistory = Array.from({ length: history }).fill(0) as number[];

let playOrSeekHandler: (() => void) | undefined;

const getMaxVolume = (analyser: AnalyserNode, fftBins: Float32Array) => {
  let maxVolume = Number.NEGATIVE_INFINITY;
  analyser.getFloatFrequencyData(fftBins);

  for (let i = 4, ii = fftBins.length; i < ii; i++) {
    if (fftBins[i] > maxVolume && fftBins[i] < 0) {
      maxVolume = fftBins[i];
    }
  }

  return maxVolume;
};

const audioCanPlayListener = (e: CustomEvent<Compressor>) => {
  const video = document.querySelector('video');
  const { audioContext } = e.detail;
  const sourceNode = e.detail.audioSource;

  // Use an audio analyser similar to Hark
  // https://github.com/otalk/hark/blob/master/hark.bundle.js
  const analyser = audioContext.createAnalyser();
  analyser.fftSize = 512;
  analyser.smoothingTimeConstant = smoothing;
  const fftBins = new Float32Array(analyser.frequencyBinCount);

  sourceNode.connect(analyser);
  analyser.connect(audioContext.destination);

  const looper = () => {
    setTimeout(() => {
      const currentVolume = getMaxVolume(analyser, fftBins);

      let history = 0;
      if (currentVolume > threshold && isSilent) {
        // Trigger quickly, short history
        for (
          let i = speakingHistory.length - 3;
          i < speakingHistory.length;
          i++
        ) {
          history += speakingHistory[i];
        }

        if (history >= 2) {
          // Not silent
          isSilent = false;
          hasAudioStarted = true;
        }
      } else if (currentVolume < threshold && !isSilent) {
        for (const element of speakingHistory) {
          history += element;
        }

        if (history == 0 // Silent
          && !(
            video && (
              video.paused
              || video.seeking
              || video.ended
              || video.muted
              || video.volume === 0
            )
          )
        ) {
          isSilent = true;
          skipSilence();
        }
      }

      speakingHistory.shift();
      speakingHistory.push(Number(currentVolume > threshold));

      looper();
    }, interval);
  };
  looper();

  const skipSilence = () => {
    if (config.onlySkipBeginning && hasAudioStarted) {
      return;
    }

    if (isSilent && video && !video.paused) {
      video.currentTime += 0.2; // In s
    }
  };

  playOrSeekHandler = () => {
    hasAudioStarted = false;
    skipSilence();
  };

  video?.addEventListener('play', playOrSeekHandler);
  video?.addEventListener('seeked', playOrSeekHandler);
};

-  return {
-    async onLoad() {
-      config = await getConfig();
-
-      document.addEventListener(
-        'audioCanPlay',
-        audioCanPlayListener,
-        {
-          passive: true,
-        },
-      );
-    },
-    onUnload() {
-      document.removeEventListener(
-        'audioCanPlay',
-        audioCanPlayListener,
-      );
-
-      if (playOrSeekHandler) {
-        const video = document.querySelector('video');
-        video?.removeEventListener('play', playOrSeekHandler);
-        video?.removeEventListener('seeked', playOrSeekHandler);
-      }
-    },
-  };
-});
+export const onRendererLoad = async ({ getConfig }: RendererContext<SkipSilencesPluginConfig>) => {
+  config = await getConfig();
+
+  document.addEventListener(
+    'audioCanPlay',
+    audioCanPlayListener,
+    {
+      passive: true,
+    },
+  );
+};
+
+export const onRendererUnload = () => {
+  document.removeEventListener(
+    'audioCanPlay',
+    audioCanPlayListener,
+  );
+
+  if (playOrSeekHandler) {
+    const video = document.querySelector('video');
+    video?.removeEventListener('play', playOrSeekHandler);
+    video?.removeEventListener('seeked', playOrSeekHandler);
+  }
+};