Masked Watch

動画上のテキストや顔を検出してコメントを透過する

当前为 2019-08-23 提交的版本，查看最新版本。

// ==UserScript==
// @name        Masked Watch
// @namespace   https://github.com/segabito/
// @description 動画上のテキストや顔を検出してコメントを透過する
// @match       *://www.nicovideo.jp/*
// @match       *://live.nicovideo.jp/*
// @match       *://anime.nicovideo.jp/*
// @match       *://embed.nicovideo.jp/watch/*
// @match       *://sp.nicovideo.jp/watch/*
// @exclude     *://ads*.nicovideo.jp/*
// @exclude     *://www.nicovideo.jp/favicon.ico*
// @version     0.1.0
// @grant       none
// @author      名無しさん
// @license     public domain
// ==/UserScript==
/* eslint-disable */


// chrome://flags/#enable-experimental-web-platform-features
(() => {
  const PRODUCT = 'MaskedWatch';

  const monkey = (PRODUCT) => {
    'use strict';
    var VER = '0.1.0';
    const ENV = 'STABLE';


    // Fallback values used when a setting has never been written to
    // localStorage (see the `config` accessor object defined below).
    const DEFAULT_CONFIG = {
      interval: 30,        // frame-sampling period passed to setInterval (ms)
      enabled: true,       // master on/off switch
      debug: false,        // debug logging + on-screen mask preview canvas
      faceDetection: true, // run FaceDetector on each sampled frame
      textDetection: true, // run TextDetector on each sampled frame
      // NOTE(review): the console config menu text claims fastMode
      // defaults to false — one of the two is stale; confirm intent.
      fastMode: true,      // detector option: true = speed over accuracy
      width: 160,          // mask canvas width (px)
      height: 90           // mask canvas height (px)
    };
    // Live configuration: a localStorage-backed object with one accessor
    // per DEFAULT_CONFIG key. It extends Function only so that typing
    // `config` in the devtools console prints the help menu below via the
    // custom toString(); the instance is never actually called.
    const config = new class extends Function {
      toString() {
        // NOTE(review): menu shows `fastMode: false` while
        // DEFAULT_CONFIG.fastMode is true — confirm the intended default.
        return `
*** CONFIG MENU (設定はサービスごとに保存) ***
enabled: true,       // 有効/無効
debug: false,        // デバッグON/OFF
faceDetection: true, // 顔検出ON/OFF
textDetection: true, // テキスト検出ON/OFF
fastMode: false,     // false 精度重視 true 速度重視
width: 160,          // マスク用キャンバスの横解像度
height: 90           // マスク用キャンバスの縦解像度
`;
      }
    }, def = {};
    // Build a getter/setter pair per setting. Reads fall back to
    // DEFAULT_CONFIG when the localStorage key is absent; writes persist
    // the JSON-encoded value and broadcast a `${PRODUCT}-config.update`
    // CustomEvent (bubbles + composed, so window-level listeners see it).
    Object.keys(DEFAULT_CONFIG).sort().forEach(key => {
      const storageKey = `${PRODUCT}_${key}`;
      def[key] = {
        enumerable: true,
        get() {
          return localStorage.hasOwnProperty(storageKey) ?
            JSON.parse(localStorage[storageKey]) : DEFAULT_CONFIG[key];
        },
        set(value) {
          const currentValue = this[key];
          if (value === currentValue) {
            return; // no-op writes do not persist or dispatch an event
          }
          localStorage[storageKey] = JSON.stringify(value);
          document.body.dispatchEvent(
            new CustomEvent(`${PRODUCT}-config.update`,
            {detail: {key, value, lastValue: currentValue}, bubbles: true, composed: true}
          ));
        }
      };
    });
    Object.defineProperties(config, def);

    // Expose for manual tweaking from the devtools console.
    window.MaskedWatch = { config };

    /**
     * Build a Worker from a function without a separate script file:
     * stringify the function, wrap it in an IIFE that is passed `self`,
     * and load the result through a Blob URL.
     * @param {Function} func - worker body, invoked as func(self)
     * @param {Object} [options] - forwarded to the Worker constructor
     * @returns {Worker}
     */
    const createWorker = (func, options = {}) => {
      const source = `(${func.toString()})(self);`;
      const scriptBlob = new Blob([source], {type: 'text/javascript'});
      const blobUrl = URL.createObjectURL(scriptBlob);
      return new Worker(blobUrl, options);
    };

    // Worker body (stringified by createWorker and executed inside a
    // Worker). Receives video frames as ImageBitmaps, runs the
    // experimental shape-detection APIs over them, paints translucent /
    // transparent rectangles onto a transferred OffscreenCanvas, and
    // replies with a data: URL the page uses as a CSS mask image.
    const 業務 = function(self) {
      let canvas, ctx, fastMode, faceDetection, textDetection, debug, enabled;
      let faceDetector, textDetector;

      // (Re)build the detectors with the current fastMode flag.
      // BUGFIX: the previous version constructed them once at worker
      // start-up — before `init` had assigned fastMode — so the
      // {fastMode} option was always undefined, and later config changes
      // to fastMode were silently ignored.
      const createDetectors = () => {
        faceDetector = new (self || window).FaceDetector({fastMode});
        textDetector = new (self || window).TextDetector({fastMode});
      };

      // First message: adopt the transferred canvas and initial settings,
      // then paint the mask fully opaque (mask shows everything).
      const init = params => {
        ({canvas} = params);
        ({fastMode, faceDetection, textDetection, debug, enabled} = params.config);
        ctx = canvas.getContext('2d');
        canvas.width = params.config.width;
        canvas.height = params.config.height;
        ctx.fillStyle = 'rgba(255, 255, 255, 1)';
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        createDetectors();
      };

      // Settings changed on the page side: resize + repaint the mask
      // canvas and rebuild the detectors so fastMode takes effect.
      const config = ({config}) => {
        ({fastMode, faceDetection, textDetection, debug, enabled} = config);
        canvas.width = config.width;
        canvas.height = config.height;
        ctx.fillStyle = 'rgba(255, 255, 255, 1)';
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        createDetectors();
      };

      // Run the enabled detectors over one frame and redraw the mask.
      const detect = async ({bitmap}) => {
        const bitmapArea = bitmap.width * bitmap.height;
        // Scale factor from detection-bitmap space to mask-canvas space.
        const r = bitmap.width / canvas.width;

        const tasks = [];
        faceDetection && (tasks.push(faceDetector.detect(bitmap)));
        textDetection && (tasks.push(textDetector.detect(bitmap)));
        const detected = (await Promise.all(tasks)).flat();

        // Repaint: mostly-opaque white base, then each detected region is
        // cleared and refilled with an opacity proportional to its area —
        // larger faces/captions punch a more transparent hole.
        ctx.beginPath();
        ctx.fillStyle = 'rgba(255, 255, 255, 0.3)';
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        for (const d of detected) {
          let {x, y , width, height} = d.boundingBox;
          const area = width * height;
          const opacity = area / bitmapArea * 0.75;
          ctx.fillStyle = `rgba(255, 255, 255, ${opacity})`;

          x /= r; y /= r; width /= r; height /= r;
          if (d.landmarks) { // face: extra vertical padding
            ctx.clearRect(x - 5, y  - 8, width + 10, height + 16);
            ctx.fillRect (x - 5, y  - 8, width + 10, height + 16);
          } else {           // text: tighter vertical padding
            ctx.clearRect(x - 5, y  - 2, width + 10, height +  4);
            ctx.fillRect (x - 5, y  - 2, width + 10, height +  4);
          }
          debug && d.rawValue && console.log('text: ', d.rawValue);
        }

        return toDataURL(canvas);
      };

      // One FileReader reused across frames to turn blobs into data: URLs.
      const reader = new FileReader();
      const toDataURL = async (canvas, type = 'image/png') => {
        const blob = await canvas.convertToBlob({type});
        return new Promise((ok, ng) => {
          reader.onload = () => { ok(reader.result); };
          reader.onerror = ng;
          reader.readAsDataURL(blob);
        });
      };

      // Message protocol with the page: 'init' / 'config' / 'detect'.
      self.onmessage = async e => {
        const {command, params} = e.data.body;
        switch (command) {
          case 'init':
            init(params);
            self.postMessage({body: {command: 'init', params: {}, status: 'ok'}});
            break;
          case 'config':
            config(params);
            break;
          case 'detect': {
            const dataURL = await detect(params);
            self.postMessage({body: {command: 'data', params: {dataURL}, status: 'ok'}});
            break;
          }
        }
      };
    };

    // Wire one <video> to its comment layer: spawn the detection worker,
    // sample frames on an interval, and apply each returned mask image to
    // the layer's CSS mask.
    const createDetector = ({video, layer, interval}) => {
      const worker = createWorker(業務, {name: 'Facelook'});
      // Fixed 16:9 detection resolution; frames are letterboxed into it.
      const width = 640, height = 360;
      const transferCanvas = new OffscreenCanvas(640, 360);
      const ctx = transferCanvas.getContext('2d', {alpha: false});

      const workCanvas = document.createElement('canvas');
      // Debug overlay: shows the live mask in the bottom-left corner.
      Object.assign(workCanvas.style, {
        border: '1px solid #888',
        left: 0,
        bottom: 0,
        position: 'fixed',
        zIndex: '100000',
        width: `${config.width}px`,
        height: `${config.height}px`,
        opacity: 0.8,
        background: '#333',
        pointerEvents: 'none',
        userSelect: 'none'
      });
      config.debug && document.body.append(workCanvas);
      // Hand drawing control of workCanvas to the worker.
      const offscreenCanvas = workCanvas.transferControlToOffscreen();
      worker.postMessage({body:
        {command: 'init', params: {canvas: offscreenCanvas, config: {...config}}}
      }, [offscreenCanvas]);
      let currentTime = video.currentTime;

      // Busy until the worker acknowledges init; afterwards at most one
      // frame is in flight at a time.
      let isBusy = true;
      worker.addEventListener('message', e => {
        const {command, params} = e.data.body;
        switch (command) {
          case 'init':
            console.log('initialized');
            isBusy = false;
            break;
          case 'data': {
            isBusy = false;
            const url = `url('${params.dataURL}')`;
            layer.style.maskImage = url;
            layer.style.webkitMaskImage = url;
          }
           break;
        }
      });
      // Skip a tick while the worker is busy, the video is paused/stalled
      // (currentTime unchanged), or the tab is hidden.
      const onTimer = () => {
        if (isBusy ||
            currentTime === video.currentTime ||
            document.visibilityState !== 'visible') {
          return;
        }

        currentTime = video.currentTime;
        // Letterbox the frame into the fixed-size transfer canvas.
        const vw = video.videoWidth, vh = video.videoHeight;
        const ratio = Math.min(width / vw, height / vh);
        const dw = vw * ratio, dh = vh * ratio;

        ctx.drawImage(video, (width - dw) / 2, (height - dh) / 2, dw, dh);
        const bitmap = transferCanvas.transferToImageBitmap();
        isBusy = true;
        worker.postMessage({body: {command: 'detect', params: {bitmap}}}, [bitmap]);
      };
      let timer = setInterval(onTimer, interval);

      // BUGFIX: clear any running interval before starting a new one —
      // the previous version reassigned `timer` unconditionally, leaking
      // a stacked interval if start() was called while already running.
      const start = () => {
        clearInterval(timer);
        timer = setInterval(onTimer, interval);
      };
      const stop = () => {
        timer = clearInterval(timer);
        layer.style.maskImage = '';
        layer.style.webkitMaskImage = '';
      };

      // React to config changes: forward the full config to the worker,
      // then handle the page-side toggles locally.
      window.addEventListener(`${PRODUCT}-config.update`, e => {
        worker.postMessage({body: {command: 'config', params: {config: {...config}}}});
        const {key, value} = e.detail;
        switch (key) {
          case 'enabled':
            value ? start() : stop();
            break;
          case 'debug':
            value ? document.body.append(workCanvas) : workCanvas.remove();
            break;
        }
      }, {passive: true});
      return { start, stop };
    };

    // Videos already wired up (or marked with their unknown type) — keyed
    // weakly so removed players can be garbage collected.
    const vmap = new WeakMap();

    // Player detection table: the first entry whose `root` selector
    // matches an ancestor of the video wins. 対応プレイヤー増やすならココ
    const PLAYERS = [
      {root: '#MainVideoPlayer',      layer: '.CommentRenderer',                type: 'NICO VIDEO'},
      {root: '#rootElementId',        layer: '#comment canvas',                 type: 'NICO EMBED'},
      {root: '#watchVideoContainer',  layer: '#jsPlayerCanvasComment canvas',   type: 'NICO SP'},
      {root: '.zenzaPlayerContainer', layer: '.commentLayerFrame',              type: 'ZenzaWatch'},
      {root: '[class*="__leo"]',      layer: '#comment-layer-container canvas', type: 'NICO LIVE'}
    ];

    // Poll the page for newly-playing videos and attach a detector to each.
    const watch = () => {
      if (!config.enabled || document.visibilityState !== 'visible') { return; }
      for (const video of document.querySelectorAll('video, zenza-video')) {
        if (video.paused || vmap.has(video)) { continue; }
        const hit = PLAYERS.find(p => video.closest(p.root));
        const layer = hit ? document.querySelector(hit.layer) : undefined;
        const type = hit ? hit.type : 'UNKNOWN';
        console.log('%ctype: "%s"', 'font-weight: bold', layer ? type : 'UNKNOWN???');
        if (layer) {
          Object.assign(layer.style, {
            backgroundSize:     'contain',
            maskSize:           'contain',
            webkitMaskSize:     'contain',
            maskRepeat:         'no-repeat',
            webkitMaskRepeat:   'no-repeat',
            maskPosition:       'center center',
            webkitMaskPosition: 'center center'
          });
        }
        // No layer found → remember only the type string so we never
        // retry this video; otherwise remember the live detector.
        vmap.set(video,
          layer ?
          createDetector({video: video.drawableElement || video, layer, interval: config.interval}) :
          type
        );
      }
    };
    setInterval(watch, 1000);

    // Trace configuration changes to the console when debug is enabled.
    window.addEventListener(`${PRODUCT}-config.update`, ({detail: {key, value}}) => {
      if (config.debug) {
        console.log('%cupdate config.%s = %s', 'color: blue;', key, value);
      }
    }, {passive: true});

    // Start-up banner showing the effective configuration.
    console.log('%cMasked Watch', 'font-size: 200%;', `ver ${VER}`, '\nconfig: ', JSON.stringify({...config}));
  };

  // Inject `monkey` into the page context via a <script> element so it
  // runs with the page's globals rather than in the userscript sandbox.
  const loadGm = () => {
    const loader = document.createElement('script');
    loader.id = `${PRODUCT}Loader`;
    loader.setAttribute('charset', 'UTF-8');
    loader.setAttribute('type', 'text/javascript');
    loader.append(`
    (() => {
      (${monkey.toString()})("${PRODUCT}");
    })();`);
    const parent = document.head || document.documentElement;
    parent.append(loader);
  };

  loadGm();
})();

QingJ © 2025

镜像随时可能失效，请加 Q 群 300939539，或关注我们的公众号“极客氢云”获取最新地址。