// ==UserScript==
// @name UnsafeYT → Tampermonkey port (Made by ChatGPT)
// @namespace unsafe-yt-userscript
// @license MIT
// @version 1.0.0
// @description Port of UnsafeYT content script to a Tampermonkey userscript (Made by ChatGPT)
// @author Literally ChatGPT
// @match https://www.youtube.com/*
// @match https://m.youtube.com/*
// @grant GM_setValue
// @grant GM_getValue
// @grant GM_registerMenuCommand
// @grant GM_notification
// @run-at document-idle
// ==/UserScript==
(() => {
'use strict';
/**************************************************************************
* NOTE:
* - This port is based on the background.js you provided.
* - chrome.runtime messaging and popup are removed; replaced with Tampermonkey
* menu commands (GM_registerMenuCommand) and storage (GM_getValue / GM_setValue).
* - Shaders are embedded below as strings so the script is self-contained.
* - If you have other files in the ZIP (e.g. malicious code), do NOT install them.
**************************************************************************/
// ---- state variables (kept from original) ----
// Module-level mutable state shared between applyEffects() and removeEffects().
let activeCanvas = null;            // overlay <canvas> the shader draws into (null when inactive)
let activeGl = null;                // WebGL/WebGL2 context of activeCanvas
let activeAudioCtx = null;          // AudioContext driving the audio de-scrambling chain
let activeSrcNode = null;           // MediaElementSource wrapping the <video> element
let activeGainNode = null;          // mid-chain gain node (unity gain)
let activeOutputGainNode = null;    // final gain node before the destination
let activeNotchFilters = [];        // chain of BiquadFilter "notch" nodes
let resizeIntervalId = null;        // setInterval id of the periodic canvas-resize poll
let renderFrameId = null;           // requestAnimationFrame id of the render loop
let isRendering = false;            // true while the WebGL render loop is active
let originalVideoContainerStyle = null; // saved container style, restored on removal
let resizeCanvasListener = null;    // window "resize" handler (kept so it can be removed)
let currentNode = null;             // tail of the audio graph while it is being wired up
let currentUrl = window.location.href; // last observed URL, used for SPA navigation detection
let currentToken = GM_getValue('unsafe_token', '') || ""; // active de-scramble token
let savedDescription = "";          // last processed description HTML (avoids reprocessing)
// ---- deterministic hash (kept) ----
/**
 * Deterministically maps a string to a float in [0, 1).
 * Rolling polynomial hash: h = (h * prime + charCode) mod modulus, scaled by modulus.
 * @param {string} s - input, hashed per UTF-16 code unit
 * @param {number} [prime=31] - multiplier of the rolling hash
 * @param {number} [modulus=2**32] - modulus; floored to an integer before use
 * @returns {number} hash value in [0, 1)
 */
function deterministicHash(s, prime = 31, modulus = Math.pow(2, 32)) {
  const m = Math.floor(modulus);
  const folded = s.split('').reduce((acc, ch) => {
    const next = (acc * prime + ch.charCodeAt(0)) % m;
    // Defensive: keep the accumulator non-negative (mirrors the original logic).
    return next < 0 ? next + m : next;
  }, 0);
  return folded / m;
}
// ---- generator for offset map (kept) ----
/**
 * Builds a Float32Array of per-pixel normalized (dx, dy) UV offsets that undo
 * the token-seeded pixel shuffle. Layout: [dx0, dy0, dx1, dy1, ...], row-major.
 * @param {string} seedToken - non-empty seed string
 * @param {number} width - map width in pixels (> 0)
 * @param {number} height - map height in pixels (> 0)
 * @returns {Float32Array} totalPixels * 2 offsets in texture-UV units
 * @throws {Error} on non-positive dimensions or an empty/non-string seed
 */
function _generateUnshuffleOffsetMapFloat32Array(seedToken, width, height) {
  if (width <= 0 || height <= 0) {
    throw new Error("Width and height must be positive integers.");
  }
  if (typeof seedToken !== 'string' || seedToken.length === 0) {
    throw new Error("Seed string is required for deterministic generation.");
  }
  const totalPixels = width * height;
  // Two independent hashes of the token seed a sine sweep over all pixels.
  const startAngle = deterministicHash(seedToken, 31, Math.pow(2, 32) - 1) * Math.PI * 2.0;
  const angleIncrement =
    deterministicHash(seedToken + "_step", 37, Math.pow(2, 32) - 2) * Math.PI / Math.max(width, height);
  // Rank every pixel by its sine sample; the rank order defines the permutation.
  const samples = Array.from({ length: totalPixels }, (_, i) => ({
    value: Math.sin(startAngle + i * angleIncrement),
    index: i,
  }));
  samples.sort((a, b) => a.value - b.value);
  const rankOf = new Array(totalPixels);
  samples.forEach((entry, rank) => {
    rankOf[entry.index] = rank;
  });
  // Emit the normalized offset from each original pixel to its shuffled slot.
  const offsetMap = new Float32Array(totalPixels * 2);
  for (let oy = 0; oy < height; oy++) {
    for (let ox = 0; ox < width; ox++) {
      const linear = oy * width + ox;
      const shuffled = rankOf[linear];
      const out = linear * 2;
      offsetMap[out] = ((shuffled % width) - ox) / width;
      offsetMap[out + 1] = (Math.floor(shuffled / width) - oy) / height;
    }
  }
  return offsetMap;
}
// ---- utility: remove effects (kept with slight adapt) ----
/**
 * Tears down everything applyEffects() installed: the overlay canvas, the
 * render loop, the resize hooks, the GL context, the container style, and the
 * whole Web Audio graph. Safe to call repeatedly; each step is guarded.
 */
function removeEffects() {
  isRendering = false;
  currentToken = GM_getValue('unsafe_token', '') || "";

  // Audio nodes may already be detached; disconnect defensively.
  const safeDisconnect = (node) => {
    try { node.disconnect(); } catch (e) {}
  };

  if (activeCanvas) {
    activeCanvas.remove();
    activeCanvas = null;
  }
  if (resizeIntervalId !== null) {
    clearInterval(resizeIntervalId);
    resizeIntervalId = null;
  }
  if (renderFrameId !== null) {
    cancelAnimationFrame(renderFrameId);
    renderFrameId = null;
  }
  if (resizeCanvasListener) {
    window.removeEventListener("resize", resizeCanvasListener);
    resizeCanvasListener = null;
  }
  if (activeGl) {
    // Proactively release the GL context instead of waiting for GC.
    const loseExt = activeGl.getExtension && activeGl.getExtension('WEBGL_lose_context');
    if (loseExt) {
      loseExt.loseContext();
    }
    activeGl = null;
  }
  const container = document.getElementsByClassName("html5-video-container")[0];
  if (container && originalVideoContainerStyle) {
    // Restore the style properties we overwrote when attaching the canvas.
    Object.assign(container.style, originalVideoContainerStyle);
    originalVideoContainerStyle = null;
  }
  if (activeAudioCtx) {
    const video = document.querySelector(".video-stream");
    if (video && activeSrcNode) {
      safeDisconnect(activeSrcNode);
      activeSrcNode = null;
    }
    if (activeGainNode) {
      safeDisconnect(activeGainNode);
      activeGainNode = null;
    }
    activeNotchFilters.forEach(safeDisconnect);
    activeNotchFilters = [];
    if (activeOutputGainNode) {
      safeDisconnect(activeOutputGainNode);
      activeOutputGainNode = null;
    }
    activeAudioCtx.close().then(() => {
      console.log("AudioContext closed.");
      activeAudioCtx = null;
      if (video) {
        // Reload the stream so audio routes through the default output again.
        const currentSrc = video.src;
        video.src = '';
        video.load();
        video.src = currentSrc;
        video.load();
        console.log("Video source reloaded to restore audio.");
      }
    }).catch(e => console.error("Error closing AudioContext:", e));
    currentNode = null;
  }
  console.log("Removed applied effects.");
}
// ---- token detection (kept) ----
/**
 * Polls the video-description element for a line of the form "token:<value>".
 * Waits 200 ms before each attempt and retries up to `steps` more times.
 * Side effects: updates module-level savedDescription / currentToken.
 * @param {number} steps - remaining retry attempts
 * @returns {Promise<string>} the trimmed token, or "" when none was found
 */
async function getToken(steps) {
  await new Promise(r => setTimeout(r, 200));
  let description = document.getElementsByClassName("yt-core-attributed-string--link-inherit-color")[0];
  if (typeof description != 'undefined' && description.innerHTML.includes("token:") && savedDescription != description.innerHTML) {
    let parts = description.innerHTML.split("token:");
    if (parts.length > 1) {
      parts = parts[1].split("\n");
      if (parts.length > 0 && currentToken != parts[0].trim()) {
        savedDescription = description.innerHTML;
        return parts[0].trim();
      }
    }
    // BUG FIX: this retry previously recursed unconditionally; when the
    // description's token equals currentToken, the chain never terminated
    // (one call every 200 ms, forever). Respect the remaining step budget.
    if (steps > 0) {
      return await getToken(steps - 1);
    }
    savedDescription = description.innerHTML;
    currentToken = "";
    return "";
  }
  if (typeof description != 'undefined' && savedDescription != description.innerHTML && description.innerHTML.length > 5 && !description.innerHTML.includes("token:")) {
    // Description changed and clearly carries no token marker: remember it, report none.
    savedDescription = description.innerHTML;
    return "";
  }
  if (steps > 0)
    return await getToken(steps - 1);
  // Retries exhausted: record whatever we last saw and clear the token.
  savedDescription = description ? description.innerHTML : "";
  currentToken = "";
  return "";
}
// ---- embed shaders (simple vertex + fragment that uses shuffle map) ----
// Vertex shader: pass-through — forwards the clip-space quad position and
// texture coordinate untouched to the fragment stage.
const VERTEX_SHADER_SRC = `#version 300 es
in vec2 a_position;
in vec2 a_texCoord;
out vec2 v_texCoord;
void main() {
gl_Position = vec4(a_position, 0.0, 1.0);
v_texCoord = a_texCoord;
}`;
// Fragment shader: GLSL ES 3.00 ("#version 300 es"). Looks up the per-pixel
// (dx, dy) un-shuffle offset in u_shuffle (RG channels), samples the video
// frame at the displaced coordinate, inverts the RGB, and smooths the seams
// of the 80x80 shuffle grid (the 80.0 constants below must match the map size).
// NOTE(review): a "#version 300 es" shader requires a WebGL2 context; the code
// also requests a plain "webgl" context as fallback — confirm that fallback
// path can actually compile this source.
const FRAGMENT_SHADER_SRC = `#version 300 es
precision highp float;
in vec2 v_texCoord;
out vec4 fragColor;
uniform sampler2D u_sampler;
uniform sampler2D u_shuffle;
vec2 getNormal( vec2 uv ){
vec2 offset = vec2(0.0065,0.0065);
vec2 center = round((uv+offset)*80.0)/80.0;
return (center - (uv+offset))*80.0;
}
float getAxis( vec2 uv ){
vec2 normal = getNormal( uv );
float axis = abs(normal.x) > 0.435 ? 1.0 : 0.0;
return abs(normal.y) > 0.4 ? 2.0 : axis;
}
float getGrid( vec2 uv ){
float axis = getAxis( uv );
return axis > 0.0 ? 1.0 : 0.0;
}
vec4 getColor( vec2 uv ){
vec2 shuffle_sample = texture(u_shuffle, uv).rg;
vec2 base_new_uv = uv + shuffle_sample;
vec4 c = texture(u_sampler, base_new_uv);
return vec4(1.0 - c.rgb, c.a);
}
vec4 getGridFix( vec2 uv ){
vec2 normal = getNormal( uv );
vec4 base = getColor( uv );
vec4 outline = getColor( uv + normal*0.002 );
float grid = getGrid( uv );
return mix(base,outline,grid);
}
vec4 getSmoothed( vec2 uv, float power, float slice ){
vec4 result = vec4(0.0,0.0,0.0,0.0);
float PI = 3.14159265;
float TAU = PI*2.0;
for( float i=0.0; i < 8.0; i++ ){
float angle = ((i/8.0)*TAU) + (PI/2.0) + slice;
vec2 normal = vec2(sin(angle),cos(angle)*1.002);
result += getGridFix( uv + normal*power );
}
return result/8.0;
}
void main() {
vec2 uv = vec2(v_texCoord.x, -v_texCoord.y + 1.0);
float axis = getAxis( uv );
float grid = axis > 0.0 ? 1.0 : 0.0;
float slices[3] = float[3](0.0,0.0,3.14159265);
vec4 main = getGridFix( uv );
vec4 outline = getSmoothed( uv, 0.001, slices[int(axis)] );
main = mix(main,outline,grid);
fragColor = main;
//fragColor = vec4(slices[int(axis)],0.0,0.0,1.0);
//fragColor = vec4(normal,0.0,1.0);
//fragColor = vec4(grid,grid,grid , 1);
}
`;
// ---- helper: compile shader / create program ----
/**
 * Compiles a single shader of the given type from source.
 * @param {WebGLRenderingContext|WebGL2RenderingContext} gl - GL context
 * @param {number} type - gl.VERTEX_SHADER or gl.FRAGMENT_SHADER
 * @param {string} src - GLSL source text
 * @returns {WebGLShader|null} the compiled shader, or null on any failure
 */
const compileShader = (gl, type, src) => {
  if (!gl) return null;
  const shader = gl.createShader(type);
  if (!shader) {
    console.error("Failed to create shader");
    return null;
  }
  gl.shaderSource(shader, src);
  gl.compileShader(shader);
  if (gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    return shader;
  }
  // Compilation failed: report the log and free the shader object.
  console.error("Shader compile error:", gl.getShaderInfoLog(shader));
  gl.deleteShader(shader);
  return null;
};
/**
 * Links a program from vertex/fragment sources and makes it current
 * (gl.useProgram) before returning. Returns null on any failure.
 * @param {WebGLRenderingContext|WebGL2RenderingContext} gl - GL context
 * @param {string} vsSrc - vertex shader source
 * @param {string} fsSrc - fragment shader source
 * @returns {WebGLProgram|null}
 */
const createProgram = (gl, vsSrc, fsSrc) => {
  if (!gl) return null;
  const vs = compileShader(gl, gl.VERTEX_SHADER, vsSrc);
  const fs = compileShader(gl, gl.FRAGMENT_SHADER, fsSrc);
  if (!vs || !fs) {
    // BUG FIX: previously returned without freeing whichever shader DID
    // compile, leaking a WebGLShader object on every partial failure.
    if (vs) gl.deleteShader(vs);
    if (fs) gl.deleteShader(fs);
    return null;
  }
  const program = gl.createProgram();
  gl.attachShader(program, vs);
  gl.attachShader(program, fs);
  gl.linkProgram(program);
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    console.error("Program link error:", gl.getProgramInfoLog(program));
    gl.deleteProgram(program);
    // BUG FIX: shaders were also leaked on link failure; free them too.
    gl.deleteShader(vs);
    gl.deleteShader(fs);
    return null;
  }
  gl.useProgram(program);
  // Shaders are attached to the linked program; flag the objects for deletion.
  gl.deleteShader(vs);
  gl.deleteShader(fs);
  return program;
};
// ---- main applyEffects (adapted) ----
/**
 * Applies the visual (WebGL unshuffle shader) and audio (notch-filter chain)
 * effects to the current YouTube player, seeded by `seedToken`.
 * No-op when a render loop is already active or the token has < 3 characters.
 * @param {string} seedToken - token used to regenerate the unshuffle offset map
 */
async function applyEffects(seedToken) {
  if (isRendering) return;
  removeEffects();
  currentToken = seedToken;
  if (typeof currentToken !== 'string' || currentToken.length < 3) {
    console.log("Invalid or empty token. Effects will not be applied.");
    return;
  }
  console.log(`Applying effects with token: "${currentToken}"`);
  const video = document.getElementsByClassName("video-stream")[0];
  const html5_video_container = document.getElementsByClassName("html5-video-container")[0];
  if (!video) {
    console.error('No video found with class "video-stream"');
    return;
  }
  // BUG FIX: html5_video_container was dereferenced unconditionally further
  // down (style assignment + appendChild), throwing a TypeError when the
  // container element is absent. Abort cleanly instead.
  if (!html5_video_container) {
    console.error('No container found with class "html5-video-container"');
    return;
  }
  video.crossOrigin = "anonymous";
  // create canvas overlaying the video (mobile layout pins it to the top)
  activeCanvas = document.createElement("canvas");
  activeCanvas.id = "glcanvas";
  if (location.href.includes("m.youtube")) {
    Object.assign(activeCanvas.style, {
      position: "absolute",
      top: "0%",
      left: "50%",
      transform: "translateY(0%) translateX(-50%)",
      pointerEvents: "none",
      zIndex: 9999,
      touchAction: "none"
    });
  } else {
    Object.assign(activeCanvas.style, {
      position: "absolute",
      top: "50%",
      left: "50%",
      transform: "translateY(-50%) translateX(-50%)",
      pointerEvents: "none",
      zIndex: 9999,
      touchAction: "none"
    });
  }
  // preserve original style then append (restored later by removeEffects)
  if (html5_video_container && !originalVideoContainerStyle) {
    originalVideoContainerStyle = {
      position: html5_video_container.style.position,
      height: html5_video_container.style.height,
    };
  }
  Object.assign(html5_video_container.style, {
    position: "relative",
    height: "100%",
  });
  html5_video_container.appendChild(activeCanvas);
  activeGl = activeCanvas.getContext("webgl2", { alpha: false }) || activeCanvas.getContext("webgl", { alpha: false });
  if (!activeGl) {
    console.error("WebGL not supported");
    removeEffects();
    return;
  }
  // OES float ext for WebGL1 (needed to upload the float shuffle map):
  let oesTextureFloatExt = null;
  if (activeGl instanceof WebGLRenderingContext) {
    oesTextureFloatExt = activeGl.getExtension('OES_texture_float');
    if (!oesTextureFloatExt) {
      console.warn('OES_texture_float extension not available. Float textures for shuffle map might not work.');
    }
  }
  // Keep the canvas matched to the on-screen video size (resize event + poll,
  // since YouTube resizes the player without firing window "resize").
  resizeCanvasListener = () => {
    if (!activeCanvas || !video) return;
    activeCanvas.width = video.offsetWidth;
    activeCanvas.height = video.offsetHeight;
    if (activeGl) {
      activeGl.viewport(0, 0, activeGl.drawingBufferWidth, activeGl.drawingBufferHeight);
    }
  };
  window.addEventListener("resize", resizeCanvasListener);
  resizeCanvasListener();
  resizeIntervalId = setInterval(resizeCanvasListener, 2500);
  try {
    const program = createProgram(activeGl, VERTEX_SHADER_SRC, FRAGMENT_SHADER_SRC);
    if (!program) {
      removeEffects();
      return;
    }
    const posLoc = activeGl.getAttribLocation(program, "a_position");
    const texLoc = activeGl.getAttribLocation(program, "a_texCoord");
    const videoSamplerLoc = activeGl.getUniformLocation(program, "u_sampler");
    const shuffleSamplerLoc = activeGl.getUniformLocation(program, "u_shuffle");
    // NOTE(review): u_videoSize / u_mapSize are not declared in the embedded
    // shaders, so these locations are null and the uniform2f calls in the
    // render loop are no-ops — kept for compatibility with alternate shaders.
    const u_videoSize = activeGl.getUniformLocation(program, "u_videoSize");
    const u_mapSize = activeGl.getUniformLocation(program, "u_mapSize");
    // Full-screen quad: two triangles, interleaved [x, y, u, v] per vertex.
    const quadVerts = new Float32Array([
      -1, -1, 0, 0,
      1, -1, 1, 0,
      -1, 1, 0, 1,
      -1, 1, 0, 1,
      1, -1, 1, 0,
      1, 1, 1, 1,
    ]);
    const buf = activeGl.createBuffer();
    activeGl.bindBuffer(activeGl.ARRAY_BUFFER, buf);
    activeGl.bufferData(activeGl.ARRAY_BUFFER, quadVerts, activeGl.STATIC_DRAW);
    activeGl.enableVertexAttribArray(posLoc);
    activeGl.vertexAttribPointer(posLoc, 2, activeGl.FLOAT, false, 4 * Float32Array.BYTES_PER_ELEMENT, 0);
    activeGl.enableVertexAttribArray(texLoc);
    activeGl.vertexAttribPointer(texLoc, 2, activeGl.FLOAT, false, 4 * Float32Array.BYTES_PER_ELEMENT, 2 * Float32Array.BYTES_PER_ELEMENT);
    const videoTex = activeGl.createTexture();
    activeGl.bindTexture(activeGl.TEXTURE_2D, videoTex);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_WRAP_S, activeGl.CLAMP_TO_EDGE);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_WRAP_T, activeGl.CLAMP_TO_EDGE);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_MIN_FILTER, activeGl.NEAREST);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_MAG_FILTER, activeGl.NEAREST);
    // generate shuffle map from token (80x80 matches the grid constant baked
    // into the fragment shader):
    let actualSeedToken = currentToken;
    let actualWidthFromPython = 80;
    let actualHeightFromPython = 80;
    let unshuffleMapFloats = null;
    try {
      unshuffleMapFloats = _generateUnshuffleOffsetMapFloat32Array(
        actualSeedToken,
        actualWidthFromPython,
        actualHeightFromPython
      );
    } catch (error) {
      console.error("Error generating unshuffle offset map (from seed):", error);
      removeEffects();
      return;
    }
    const shuffleTex = activeGl.createTexture();
    activeGl.activeTexture(activeGl.TEXTURE1);
    activeGl.bindTexture(activeGl.TEXTURE_2D, shuffleTex);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_WRAP_S, activeGl.CLAMP_TO_EDGE);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_WRAP_T, activeGl.CLAMP_TO_EDGE);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_MIN_FILTER, activeGl.NEAREST);
    activeGl.texParameteri(activeGl.TEXTURE_2D, activeGl.TEXTURE_MAG_FILTER, activeGl.NEAREST);
    if (activeGl instanceof WebGL2RenderingContext) {
      // try RG32F upload (two floats per texel, matching the offset pairs)
      try {
        activeGl.texImage2D(
          activeGl.TEXTURE_2D,
          0,
          activeGl.RG32F,
          actualWidthFromPython,
          actualHeightFromPython,
          0,
          activeGl.RG,
          activeGl.FLOAT,
          unshuffleMapFloats
        );
      } catch (e) {
        // fallback to packed RGBA float if RG32F not allowed
        const paddedData = new Float32Array(actualWidthFromPython * actualHeightFromPython * 4);
        for (let i = 0; i < unshuffleMapFloats.length / 2; i++) {
          paddedData[i * 4 + 0] = unshuffleMapFloats[i * 2 + 0];
          paddedData[i * 4 + 1] = unshuffleMapFloats[i * 2 + 1];
          paddedData[i * 4 + 2] = 0.0;
          paddedData[i * 4 + 3] = 1.0;
        }
        activeGl.texImage2D(
          activeGl.TEXTURE_2D,
          0,
          activeGl.RGBA,
          actualWidthFromPython,
          actualHeightFromPython,
          0,
          activeGl.RGBA,
          activeGl.FLOAT,
          paddedData
        );
      }
    } else if (oesTextureFloatExt) {
      // WebGL1 path: pad the RG pairs into RGBA float texels.
      const paddedData = new Float32Array(actualWidthFromPython * actualHeightFromPython * 4);
      for (let i = 0; i < unshuffleMapFloats.length / 2; i++) {
        paddedData[i * 4 + 0] = unshuffleMapFloats[i * 2 + 0];
        paddedData[i * 4 + 1] = unshuffleMapFloats[i * 2 + 1];
        paddedData[i * 4 + 2] = 0.0;
        paddedData[i * 4 + 3] = 1.0;
      }
      activeGl.texImage2D(
        activeGl.TEXTURE_2D,
        0,
        activeGl.RGBA,
        actualWidthFromPython,
        actualHeightFromPython,
        0,
        activeGl.RGBA,
        activeGl.FLOAT,
        paddedData
      );
    } else {
      console.error("Float textures not supported by this browser's WebGL1 context.");
      removeEffects();
      return;
    }
    activeGl.clearColor(0.0, 0.0, 0.0, 1.0);
    isRendering = true;
    // Per-frame loop: re-upload the current video frame and draw the quad.
    const render = () => {
      if (!isRendering || !activeGl || !video || !activeCanvas) return;
      if (video.readyState >= video.HAVE_CURRENT_DATA) {
        activeGl.activeTexture(activeGl.TEXTURE0);
        activeGl.bindTexture(activeGl.TEXTURE_2D, videoTex);
        try {
          activeGl.texImage2D(
            activeGl.TEXTURE_2D,
            0,
            activeGl.RGBA,
            activeGl.RGBA,
            activeGl.UNSIGNED_BYTE,
            video
          );
        } catch (e) {
          // some browsers need texImage2D call adapted; swallow error
        }
        activeGl.uniform1i(videoSamplerLoc, 0);
        activeGl.activeTexture(activeGl.TEXTURE1);
        activeGl.bindTexture(activeGl.TEXTURE_2D, shuffleTex);
        activeGl.uniform1i(shuffleSamplerLoc, 1);
        // pass sizes (no-ops with the embedded shaders; see NOTE above)
        try {
          activeGl.uniform2f(u_videoSize, activeCanvas.width, activeCanvas.height);
          activeGl.uniform2f(u_mapSize, actualWidthFromPython, actualHeightFromPython);
        } catch (e) {}
        activeGl.clear(activeGl.COLOR_BUFFER_BIT);
        activeGl.drawArrays(activeGl.TRIANGLES, 0, 6);
      }
      renderFrameId = requestAnimationFrame(render);
    };
    render();
  } catch (error) {
    console.error("Error during video effects setup:", error);
    removeEffects();
    return;
  }
  // ---- audio processing (kept mostly intact) ----
  const AudioCtx = window.AudioContext || window.webkitAudioContext;
  if (!AudioCtx) {
    console.error("AudioContext not supported");
  } else {
    if (!activeAudioCtx) activeAudioCtx = new AudioCtx();
    const video = document.querySelector(".video-stream");
    if (video) {
      if (!activeSrcNode) {
        try { activeSrcNode = activeAudioCtx.createMediaElementSource(video); } catch (e) { activeSrcNode = null; }
      }
      // Downmix stereo to mono at half gain per channel.
      const splitter = activeAudioCtx.createChannelSplitter(2);
      const leftGain = activeAudioCtx.createGain();
      const rightGain = activeAudioCtx.createGain();
      leftGain.gain.value = 0.5;
      rightGain.gain.value = 0.5;
      const merger = activeAudioCtx.createChannelMerger(1);
      activeOutputGainNode = activeAudioCtx.createGain();
      const defaultOutputGain = 100.0;
      activeOutputGainNode.gain.value = defaultOutputGain;
      // Notch filter bank targeting the scramble carrier frequencies.
      const filterFrequencies = [
        200, 440, 6600, 15600, 5000, 6000, 6300, 8000, 10000, 12500, 14000, 15000, 15500, 15900, 16000
      ];
      const filterEq = [3, 2, 1, 1, 20, 20, 5, 40, 40, 40, 40, 40, 1, 1, 40];
      const filterCut = [1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1];
      const numFilters = filterFrequencies.length;
      activeNotchFilters = [];
      for (let i = 0; i < numFilters; i++) {
        const filter = activeAudioCtx.createBiquadFilter();
        filter.type = "notch";
        filter.frequency.value = filterFrequencies[i];
        filter.Q.value = filterEq[i] * 3.5;
        filter.gain.value = filterCut[i];
        activeNotchFilters.push(filter);
      }
      if (activeSrcNode) activeSrcNode.connect(splitter);
      splitter.connect(leftGain, 0);
      splitter.connect(rightGain, 1);
      leftGain.connect(merger, 0, 0);
      rightGain.connect(merger, 0, 0);
      currentNode = merger;
      activeGainNode = activeAudioCtx.createGain();
      activeGainNode.gain.value = 1.0;
      currentNode = currentNode.connect(activeGainNode);
      // Chain the notch filters in series, then into the output gain.
      if (activeNotchFilters.length > 0) {
        currentNode = currentNode.connect(activeNotchFilters[0]);
        for (let i = 0; i < numFilters - 1; i++) {
          currentNode = currentNode.connect(activeNotchFilters[i + 1]);
        }
        currentNode.connect(activeOutputGainNode);
      } else {
        currentNode.connect(activeOutputGainNode);
        console.warn("No notch filters created.");
      }
      activeOutputGainNode.connect(activeAudioCtx.destination);
      // Suspend/resume the AudioContext in lockstep with video playback.
      const handleAudioState = async () => {
        if (!activeAudioCtx || activeAudioCtx.state === 'closed') return;
        if (video.paused) {
          if (activeAudioCtx.state === 'running') {
            activeAudioCtx.suspend().catch(e => console.error("Error suspending AudioContext:", e));
          }
        } else {
          if (activeAudioCtx.state === 'suspended') {
            activeAudioCtx.resume().catch(e => console.error("Error resuming AudioContext:", e));
          }
        }
      };
      video.addEventListener("play", handleAudioState);
      video.addEventListener("pause", handleAudioState);
      if (!video.paused) handleAudioState();
    } else {
      console.error("Video element not found for audio effects.");
    }
  }
}
// ---- initialization logic (kept) ----
/**
 * One-shot setup for the current page: clears any previous effects, resolves
 * a token (description first, stored token as fallback), and either applies
 * effects immediately (video already playing) or stashes the token for the
 * play watcher.
 */
async function initializeScript() {
  removeEffects();
  const detected = await getToken(30);
  // Fall back to the persisted token when none is found in the description.
  const token = detected == "" ? (GM_getValue('unsafe_token', '') || '') : detected;
  if (token == "") return;
  console.log(`Initial token found: "${token}"`);
  const video = document.getElementsByClassName("video-stream")[0];
  if (video && !video.paused) {
    video.hooked = true;
    await applyEffects(token);
    return;
  }
  currentToken = token;
}
// ---- watch for video play (kept) ----
// Poll for a playing, not-yet-hooked video and apply effects once per element.
setInterval(async () => {
  const video = document.getElementsByClassName("video-stream")[0];
  if (video === undefined || video.paused || video.hooked != null || isRendering) return;
  video.hooked = true;
  console.log("Video is playing, applying effects...");
  await applyEffects(currentToken);
}, 500);
console.log("Trying to initialize (userscript) ...");
// Re-initialize whenever YouTube's SPA navigation changes the page.
async function urlChanged() {
  console.log('URL change detected!');
  await initializeScript();
}
// Compare URLs without extra query params ("&...") or fragments ("#...").
const normalizeUrl = (href) => href.split("&")[0].split("#")[0];
currentUrl = normalizeUrl(location.href);
setInterval(async () => {
  const candidate = normalizeUrl(location.href);
  if (candidate === currentUrl) return;
  currentUrl = candidate;
  await urlChanged();
}, 500);
initializeScript();
// ---- Tampermonkey menu commands ----
// Shared helper: all notifications use the same title.
const notify = (text) => GM_notification({ text, title: "UnsafeYT userscript" });
GM_registerMenuCommand("UnsafeYT: Set token manually", async () => {
  const entered = prompt("Enter token (leave empty to clear):", GM_getValue('unsafe_token', '') || '');
  if (entered === null) return; // user cancelled the prompt
  GM_setValue('unsafe_token', entered || '');
  currentToken = entered || '';
  notify("Token saved. Use 'Turn On' to apply.");
});
GM_registerMenuCommand("UnsafeYT: Turn On (apply token)", async () => {
  currentToken = GM_getValue('unsafe_token', '') || currentToken;
  if (!currentToken || currentToken.length < 3) {
    alert("No valid token set. Use the 'Set token manually' menu or place 'token:...' in the video description.");
    return;
  }
  await applyEffects(currentToken);
});
GM_registerMenuCommand("UnsafeYT: Turn Off (remove effects)", () => {
  removeEffects();
});
GM_registerMenuCommand("UnsafeYT: Reload token from description", async () => {
  removeEffects();
  const found = await getToken(30);
  if (!found || found.length === 0) {
    notify("No token found in description.");
    return;
  }
  GM_setValue('unsafe_token', found);
  currentToken = found;
  await new Promise(r => setTimeout(r, 200));
  await applyEffects(currentToken);
  notify("Reloaded token from description and applied.");
});
// ---- small safety precaution: don't auto-run if local page looks suspicious ----
// (kept minimal) - rely on user control through menu.
})();