
fix: init vap-player in src/lib

ryanemax 1 month ago
Commit
f7d38bb89d

+ 1 - 0
deploy.ps1

@@ -37,6 +37,7 @@
     "svgaplayerweb": "^2.3.2",
     "swiper": "^11.1.14",
     "tslib": "^2.3.0",
+    "video-animation-player": "^1.0.5",
     "zone.js": "~0.14.3"
   },
   "devDependencies": {

+ 4 - 2
projects/live-app/src/app/components/flutter-comp/flutter-comp.component.ts

@@ -1,6 +1,8 @@
 import { Component, Input, OnInit } from '@angular/core';
 import { MessageService } from '../../../services/message.service';
-declare const Vap: any;
+
+import {VapInit} from '../../../lib/vap-player/index'
+
 @Component({
   selector: 'app-flutter-comp',
   templateUrl: './flutter-comp.component.html',
@@ -22,7 +24,7 @@ export class FlutterCompComponent implements OnInit {
     this.msgSerrvice.isPlayer = true
     let dom = document.getElementById(this.domId);
     // console.log(dom);
-    let vapPlayer = new Vap.default({
+    let vapPlayer = VapInit({
      container: dom, // rendering container (DOM element)
      src: param.video, // URL of the VAP animation video
      config: param.config, // JSON config required for VAP playback (required)
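
Note: the vendored factory throws when WebGL is unavailable (see src/lib/vap-player/index.ts below), so call sites that previously relied on the global Vap build may want to guard the call. A minimal sketch assuming the same dom/param values as in this component; the warning text is illustrative:

import { VapInit, canWebGL } from '../../../lib/vap-player/index';

let vapPlayer: any = null;
if (canWebGL()) {
  vapPlayer = VapInit({
    container: dom,        // DOM element to render into
    src: param.video,      // VAP video URL
    config: param.config,  // VAP JSON config (required)
    loop: false,
    accurate: false,       // set true to align frames via requestVideoFrameCallback
  });
} else {
  console.warn('[vap] WebGL not supported; skipping animation');
}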

+ 6 - 0
projects/live-app/src/app/components/gift-modal/gift-modal.component.scss

@@ -1,3 +1,9 @@
+.vap-warp,#vap-warp{
+  will-change: transform, opacity;
+  backface-visibility: hidden;
+  perspective: 1000px; /* only needed for a 3D effect */
+}
+
 .gift-modal {
   --height: 102.5641vw;
 }

+ 9 - 6
projects/live-app/src/app/components/gift-modal/gift-modal.component.ts

@@ -12,8 +12,8 @@ import {
 } from '../../../modules/ionic-standalone.modules';
 import { AccountService } from '../../../services/account.service';
 import { MessageService } from '../../../services/message.service';
-// import Vap from 'video-animation-player'
-declare const Vap: any;
+import {VapInit} from '../../../lib/vap-player/index'
+
 @Component({
   selector: 'app-gift-modal',
   templateUrl: './gift-modal.component.html',
@@ -60,6 +60,9 @@ export class GiftModalComponent implements OnInit {
   }
   async ngOnInit() {
     this.giftList = this.msgSerrvice.giftList;
+    setTimeout(()=>{
+      this.selectTab("all")
+    },200)
   }
   async openModal() {
     let uid: any = Parse.User.current()?.id;
@@ -142,16 +145,16 @@ export class GiftModalComponent implements OnInit {
   initPlayer(){
     console.log('送出礼物',this.currentGift);
     let json = this.currentGift.config
-    let dom = document.getElementById(this.domId);
-    let vapPlayer = new Vap.default({
+    let dom:any = document.getElementById(this.domId);
+    let vapPlayer:any = VapInit({
      container: dom, // rendering container (DOM element)
      src: this.currentGift.video, // URL of the VAP animation video
      config: json, // JSON config required for VAP playback (required)
      width: window.innerWidth, // container width
      height: window.innerHeight, // container height
-      fps: 60, // frame rate; the JSON config carries the video's fps, check it there
+      fps: json?.info?.fps || 30, // frame rate; the JSON config carries the video's fps, check it there
      mute: false, // mute audio
-      type: 1, // the component caches instances by the type field; distinct VAP instances should use distinct type values (e.g. 0, 1, 2)
+      type: 0, // the component caches instances by the type field; distinct VAP instances should use distinct type values (e.g. 0, 1, 2)
      loop: false, // loop playback
      precache: true, // download the whole video before playing; small animations can stream, but large ones should precache, otherwise a big file or a poor network causes stuttering
      beginPoint: 0, // start time in seconds; may have no effect in some browsers
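
A note on the fps and type edits above: once the JSON is parsed, webgl-render-vap.ts overwrites the option with this.options.fps = this.vapFrameParser.config.info.fps || 30, so the value passed here is only a pre-parse fallback, and the vendored factory keeps no per-type instance cache, so type is passed through unused. A minimal teardown sketch, assuming the component should reset the shared player flag when the gift animation ends (the flag mirrors the one toggled in flutter-comp and message.service and is an assumption for this component):

vapPlayer.on('ended', () => {        // 'ended' is the native <video> event, forwarded by the wrapper
  vapPlayer.destroy();               // removes the hidden <video> and the WebGL canvas
  this.msgSerrvice.isPlayer = false; // assumption: same flag the other call sites set to true
});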

+ 70 - 0
projects/live-app/src/lib/vap-player/gl-util.ts

@@ -0,0 +1,70 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+export function createShader(gl: WebGLRenderingContext, type: number, source: string) {
+  const shader = gl.createShader(type);
+  gl.shaderSource(shader, source);
+  gl.compileShader(shader);
+  // if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+  //     console.error(gl.getShaderInfoLog(shader))
+  // }
+  return shader;
+}
+
+export function createProgram(gl: WebGLRenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader) {
+  const program = gl.createProgram();
+  gl.attachShader(program, vertexShader);
+  gl.attachShader(program, fragmentShader);
+  gl.linkProgram(program);
+  // if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
+  //     console.error(gl.getProgramInfoLog(program))
+  // }
+  gl.useProgram(program);
+  return program;
+}
+
+export function createTexture(gl: WebGLRenderingContext, index: number, imgData?: TexImageSource) {
+  const texture = gl.createTexture();
+  const textrueIndex = gl.TEXTURE0 + index;
+  gl.activeTexture(textrueIndex);
+  gl.bindTexture(gl.TEXTURE_2D, texture);
+  // gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
+  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
+  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+  if (imgData) {
+    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, imgData);
+  }
+  return texture;
+}
+
+export function cleanWebGL(gl: WebGLRenderingContext, { shaders = [], program = null, textures = [], buffers = [] }) {
+  try {
+    textures.forEach((t) => {
+      gl.deleteTexture(t);
+    });
+    buffers.forEach((b) => {
+      gl.deleteBuffer(b);
+    });
+    if (program) {
+      shaders.forEach((shader) => {
+        gl.detachShader(program, shader);
+        gl.deleteShader(shader);
+      });
+      gl.deleteProgram(program);
+    }
+  } catch (e) {}
+}
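
These helpers are meant to be chained: compile both shader stages, link them into a program, and hand that program to the renderer (createProgram also calls gl.useProgram). A minimal sketch with trivial placeholder GLSL; the real shaders live in webgl-render-vap.ts:

import { createShader, createProgram } from './gl-util';

export function buildProgram(gl: WebGLRenderingContext): WebGLProgram {
  const vs = createShader(
    gl,
    gl.VERTEX_SHADER,
    'attribute vec2 a_position; void main() { gl_Position = vec4(a_position, 0.0, 1.0); }'
  );
  const fs = createShader(
    gl,
    gl.FRAGMENT_SHADER,
    'precision lowp float; void main() { gl_FragColor = vec4(1.0); }'
  );
  return createProgram(gl, vs, fs);
}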

+ 57 - 0
projects/live-app/src/lib/vap-player/index.ts

@@ -0,0 +1,57 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { VapConfig } from './type';
+import WebglRenderVap from './webgl-render-vap';
+let isCanWebGL: boolean;
+/**
+ * @param options
+ * @constructor
+ * @return {null}
+ */
+export default function (options?: VapConfig) {
+  if (canWebGL()) {
+    return new WebglRenderVap(options);
+  } else {
+    throw new Error('your browser not support webgl');
+  }
+}
+export function VapInit(options?: VapConfig) {
+  if (canWebGL()) {
+    return new WebglRenderVap(options);
+  } else {
+    throw new Error('your browser not support webgl');
+  }
+}
+
+export function canWebGL(): boolean {
+  if (typeof isCanWebGL !== 'undefined') {
+    return isCanWebGL;
+  }
+  try {
+    // @ts-ignore
+    if (!window.WebGLRenderingContext) {
+      return false;
+    }
+    const canvas = document.createElement('canvas');
+    let context = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
+
+    isCanWebGL = !!context;
+    context = null;
+  } catch (err) {
+    isCanWebGL = false;
+  }
+  return isCanWebGL;
+}

+ 17 - 0
projects/live-app/src/lib/vap-player/type.ts

@@ -0,0 +1,17 @@
+export interface VapConfig {
+  container: HTMLElement;
+  src: string;
+  config: string | { [key: string]: any };
+  fps?: number;
+  width?: number;
+  height?: number;
+  // 循环播放
+  loop: boolean;
+  mute?: boolean;
+  precache?: boolean;
+  // 使用requestVideoFrameCallback对齐帧数据
+  accurate: boolean;
+  onLoadError?: (e: ErrorEvent) => void;
+  onDestory?: () => void;
+  [key: string]: any;
+}
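
For reference, a config object that satisfies this interface the way the call sites above use it; the element id matches the vap-warp container added in gift-modal.component.scss, and the URLs are placeholders:

import { VapConfig } from './type';

const giftAnimation: VapConfig = {
  container: document.getElementById('vap-warp') as HTMLElement,
  src: 'https://example.com/gift.mp4',      // placeholder video URL
  config: 'https://example.com/gift.json',  // URL or the already-parsed JSON object
  width: window.innerWidth,
  height: window.innerHeight,
  fps: 30,
  loop: false,
  mute: false,
  precache: true,
  accurate: true,
  onLoadError: (e) => console.error('[vap] load error', e),
};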

+ 175 - 0
projects/live-app/src/lib/vap-player/vap-frame-parser.ts

@@ -0,0 +1,175 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+export default class FrameParser {
+  constructor(source, headData) {
+    this.config = source || {};
+    this.headData = headData;
+    this.frame = [];
+    this.textureMap = {};
+  }
+
+  public config;
+  private headData;
+  private frame;
+  public textureMap;
+  private canvas: HTMLCanvasElement;
+  private ctx: CanvasRenderingContext2D | null;
+  public srcData;
+
+  async init() {
+    // determine whether the config is a URL or a JSON object
+    if (/\/\/[-A-Za-z0-9+&@#/%?=~_|!:,.;]+[-A-Za-z0-9+&@#/%=~_|]\.json/.test(this.config)) {
+      this.config = await this.getConfigBySrc(this.config);
+    }
+    await this.parseSrc(this.config);
+    this.frame = this.config.frame || [];
+    return this;
+  }
+
+  initCanvas() {
+    if (!this.canvas) {
+      const canvas = document.createElement('canvas');
+      const ctx = canvas.getContext('2d');
+      canvas.style.display = 'none';
+      document.body.appendChild(canvas);
+      this.ctx = ctx;
+      this.canvas = canvas;
+    }
+  }
+
+  loadImg(url: string) {
+    return new Promise((resolve, reject) => {
+      // console.log('load img:', url)
+      const img = new Image();
+      img.crossOrigin = 'anonymous';
+      img.onload = function () {
+        resolve(this);
+      };
+      img.onerror = function (e) {
+        console.error('frame 资源加载失败:' + url);
+        reject(new Error('frame 资源加载失败:' + url));
+      };
+      img.src = url;
+    });
+  }
+
+  parseSrc(dataJson) {
+    const src = (this.srcData = {});
+    return Promise.all(
+      (dataJson.src || []).map(async (item) => {
+        item.img = null;
+        if (!this.headData[item.srcTag.slice(1, item.srcTag.length - 1)] && !this.headData[item.srcTag]) {
+          console.warn(`vap: 融合信息没有传入:${item.srcTag}`);
+        } else {
+          if (item.srcType === 'txt') {
+            if (this.headData['fontStyle'] && !item['fontStyle']) {
+              item['fontStyle'] = this.headData['fontStyle'];
+            }
+            item.textStr =
+              this.headData[item.srcTag] ||
+              item.srcTag.replace(/\[(.*)\]/, ($0, $1) => {
+                return this.headData[$1];
+              });
+            this.initCanvas();
+            item.img = this.makeTextImg(item);
+          } else if (item.srcType === 'img') {
+            item.imgUrl =
+              this.headData[item.srcTag] ||
+              item.srcTag.replace(/\[(.*)\]/, ($0, $1) => {
+                return this.headData[$1];
+              });
+            try {
+              item.img = await this.loadImg(item.imgUrl);
+            } catch (e) {}
+          }
+          if (item.img) {
+            src[item.srcId] = item;
+          }
+        }
+      })
+    ).then(() => {
+      if (this.canvas) {
+        this.canvas.parentNode?.removeChild(this.canvas);
+      }
+    });
+  }
+
+  /**
+   * Download the JSON config file
+   * @param jsonUrl external JSON URL
+   * @returns {Promise}
+   */
+  getConfigBySrc(jsonUrl: string) {
+    return new Promise((resolve, reject) => {
+      const xhr = new XMLHttpRequest();
+      xhr.open('GET', jsonUrl, true);
+      xhr.responseType = 'json';
+      xhr.onload = function () {
+        if (xhr.status === 200 || (xhr.status === 304 && xhr.response)) {
+          const res = xhr.response;
+          resolve(res);
+        } else {
+          reject(new Error('http response invalid' + xhr.status));
+        }
+      };
+      xhr.send();
+    });
+  }
+
+  /**
+   * Render text into an image
+   * @param item
+   */
+  makeTextImg(item) {
+    const { textStr, w, h, color, style, fontStyle } = item;
+    const ctx = this.ctx;
+    ctx.canvas.width = w;
+    ctx.canvas.height = h;
+    ctx.textBaseline = 'middle';
+    ctx.textAlign = 'center';
+    const getFontStyle = function () {
+      const fontSize = Math.min(w / textStr.length, h - 8); // leave a small margin
+      const font = [`${fontSize}px`, 'Arial'];
+      if (style === 'b') {
+        font.unshift('bold');
+      }
+      return font.join(' ');
+    };
+    if (!fontStyle) {
+      ctx.font = getFontStyle();
+      ctx.fillStyle = color;
+    } else if (typeof fontStyle == 'string') {
+      ctx.font = fontStyle;
+      ctx.fillStyle = color;
+    } else if (typeof fontStyle == 'object') {
+      ctx.font = fontStyle['font'] || getFontStyle();
+      ctx.fillStyle = fontStyle['color'] || color;
+    } else if (typeof fontStyle == 'function') {
+      ctx.font = getFontStyle();
+      ctx.fillStyle = color;
+      fontStyle.call(null, ctx, item);
+    }
+    ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);
+    ctx.fillText(textStr, w / 2, h / 2);
+    // console.log('frame : ' + textStr, ctx.canvas.toDataURL('image/png'))
+    return ctx.getImageData(0, 0, w, h);
+  }
+  getFrame(frame) {
+    return this.frame.find((item) => {
+      return item.i === frame;
+    });
+  }
+}
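
The parser expects a config of roughly the following shape; the field names are the ones read in this file and in webgl-render-vap.ts, while the concrete values below are purely illustrative:

const exampleVapConfig = {
  info: {
    fps: 30,
    w: 375, h: 667,               // display size of the animation
    videoW: 750, videoH: 1334,    // size of the source mp4
    rgbFrame: [0, 0, 375, 667],   // region of the video holding the RGB channels
    aFrame: [375, 0, 375, 667],   // region of the video holding the alpha mask
  },
  src: [
    // fusion sources resolved against headData (the options passed to the player)
    { srcId: '1', srcType: 'img', srcTag: '[avatar]' },
    { srcId: '2', srcType: 'txt', srcTag: '[nickname]', w: 200, h: 40, color: '#fff', style: 'b' },
  ],
  frame: [
    // per-frame placement: frame = display rect, mFrame = mask rect inside the video
    { i: 0, obj: [{ srcId: '1', frame: [10, 10, 80, 80], mFrame: [400, 10, 80, 80] }] },
  ],
};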

+ 307 - 0
projects/live-app/src/lib/vap-player/video.ts

@@ -0,0 +1,307 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { VapConfig } from './type';
+
+export default class VapVideo {
+  public options: VapConfig;
+  public requestAnim: (cb: any) => number;
+  public container: HTMLElement;
+  public video: HTMLVideoElement;
+  protected events: { [key: string]: Array<(...info: any[]) => void> } = {};
+  private _drawFrame: any;
+  protected animId: number;
+  protected useFrameCallback: boolean;
+  private firstPlaying = true;
+  private setBegin: boolean;
+  private customEvent: Array<string> = ['frame', 'percentage', ''];
+
+  setOptions(options: VapConfig) {
+    if (!options.container || !options.src) {
+      console.warn('[Alpha video]: options container and src cannot be empty!');
+    }
+    this.options = Object.assign(
+      {
+        // video URL
+        src: '',
+        // loop playback
+        loop: false,
+        fps: 20,
+        // container element
+        container: null,
+        // whether to precache the video resource
+        precache: false,
+        // whether to play muted
+        mute: false,
+        config: '',
+        accurate: false,
+        // frame offset; normally unused, reserved for problematic assets
+        offset: 0,
+      },
+      options
+    );
+    this.setBegin = true;
+    this.useFrameCallback = false;
+    this.container = this.options.container;
+    if (!this.options.src || !this.options.config || !this.options.container) {
+      console.error('参数出错:src(视频地址)、config(配置文件地址)、container(dom容器)');
+    }
+    return this;
+  }
+
+  precacheSource(source): Promise<string> {
+    const URL = (window as any).webkitURL || window.URL;
+    return new Promise((resolve, reject) => {
+      const xhr = new XMLHttpRequest();
+      xhr.open('GET', source, true);
+      xhr.responseType = 'blob';
+      xhr.onload = function () {
+        if (xhr.status === 200 || xhr.status === 304) {
+          const res = xhr.response;
+          if (/iphone|ipad|ipod/i.test(navigator.userAgent)) {
+            const fileReader = new FileReader();
+
+            fileReader.onloadend = function () {
+              const resultStr = fileReader.result as string;
+              const raw = atob(resultStr.slice(resultStr.indexOf(',') + 1));
+              const buf = Array(raw.length);
+              for (let d = 0; d < raw.length; d++) {
+                buf[d] = raw.charCodeAt(d);
+              }
+              const arr = new Uint8Array(buf);
+              const blob = new Blob([arr], { type: 'video/mp4' });
+              resolve(URL.createObjectURL(blob));
+            };
+            fileReader.readAsDataURL(xhr.response);
+          } else {
+            resolve(URL.createObjectURL(res));
+          }
+        } else {
+          reject(new Error('http response invalid' + xhr.status));
+        }
+      };
+      xhr.send();
+    });
+  }
+
+  initVideo() {
+    const options = this.options;
+    // create the video element
+    let video = this.video;
+    if (!video) {
+      video = this.video = document.createElement('video');
+    }
+    video.crossOrigin = 'anonymous';
+    video.autoplay = false;
+    video.preload = 'auto';
+    video.setAttribute('playsinline', '');
+    video.setAttribute('webkit-playsinline', '');
+    if (options.mute) {
+      video.muted = true;
+      video.volume = 0;
+    }
+    video.style.display = 'none';
+    video.loop = !!options.loop;
+    if (options.precache) {
+      this.precacheSource(options.src)
+        .then((blob) => {
+          console.log('sample precached.');
+          video.src = blob;
+          document.body.appendChild(video);
+        })
+        .catch((e) => {
+          console.error(e);
+        });
+    } else {
+      video.src = options.src;
+      // append to <body>; attaching to the container can break playback when the container moves
+      document.body.appendChild(this.video);
+      video.load();
+    }
+
+    this.firstPlaying = true;
+    if ('requestVideoFrameCallback' in this.video) {
+      this.useFrameCallback = !!this.options.accurate;
+    }
+    this.cancelRequestAnimation();
+
+    // bind events
+    this.offAll();
+    ['playing', 'error', 'canplay'].forEach((item) => {
+      this.on(item, this['on' + item].bind(this));
+    });
+  }
+
+  drawFrame(_, _info){
+    this._drawFrame = this._drawFrame || this.drawFrame.bind(this);
+    if (this.useFrameCallback) {
+      // @ts-ignore
+      this.animId = this.video.requestVideoFrameCallback(this._drawFrame);
+    } else {
+      this.animId = this.requestAnim(this._drawFrame);
+    }
+  }
+
+  play() {
+    if (this.useFrameCallback) {
+      // @ts-ignore
+      this.animId = this.video.requestVideoFrameCallback(this.drawFrame.bind(this));
+    } else {
+      this.requestAnim = this.requestAnimFunc();
+    }
+
+    const prom = this.video && this.video.play();
+    if (prom && prom.then) {
+      prom.catch((e) => {
+        if (!this.video) {
+          return;
+        }
+        this.video.muted = true;
+        this.video.volume = 0;
+        this.video.play().catch((e) => {
+          this.trigger('error', e);
+        });
+      });
+    }
+  }
+
+  pause() {
+    this.video && this.video.pause();
+  }
+
+  setTime(t) {
+    if (this.video) {
+      this.video.currentTime = t;
+    }
+  }
+
+  requestAnimFunc() {
+    const { fps = 30 } = this.options;
+    if (window.requestAnimationFrame) {
+      let index = -1;
+      return (cb) => {
+        index++;
+        return requestAnimationFrame(() => {
+          if (!(index % (60 / fps))) {
+            return cb();
+          }
+          this.animId = this.requestAnim(cb);
+        });
+      };
+    }
+    return function (cb) {
+      return window.setTimeout(cb, 1000 / fps);
+    };
+  }
+
+  cancelRequestAnimation() {
+    if (!this.animId) {
+      return;
+    }
+    if (this.useFrameCallback) {
+      try {
+        // @ts-ignore
+        this.video.cancelVideoFrameCallback(this.animId);
+      } catch (e) {
+        console.error(e);
+      }
+    } else if (window.cancelAnimationFrame) {
+      cancelAnimationFrame(this.animId);
+    } else {
+      clearTimeout(this.animId);
+    }
+    this.animId = 0;
+  }
+
+  clear() {
+    this.cancelRequestAnimation();
+  }
+
+  destroy() {
+    this.cancelRequestAnimation();
+    if (this.video) {
+      this.offAll();
+      this.video.parentNode && this.video.parentNode.removeChild(this.video);
+      this.video = null;
+    }
+    (this.options as any).onDestroy && (this.options as any).onDestroy();
+  }
+
+  on(event, callback: any) {
+    const cbs = this.events[event] || [];
+    cbs.push(callback);
+    this.events[event] = cbs;
+    if (this.customEvent.indexOf(event) === -1) {
+      this.video.addEventListener(event, callback);
+    }
+    return this;
+  }
+
+  once(event, callback: any) {
+    const once = (...e) => {
+      const cbs = this.events[event];
+      cbs.splice(cbs.indexOf(once), 1);
+      this.video.removeEventListener(event, once);
+      callback(...e);
+    };
+    return this.on(event, once);
+  }
+
+  trigger(eventName, ...e) {
+    try {
+      (this.events[eventName] || []).forEach((item) => {
+        item(...e);
+      });
+    } catch (e) {
+      console.error(e);
+    }
+  }
+
+  offAll() {
+    Object.keys(this.events).forEach((name) => {
+      const cbs = this.events[name];
+      if (cbs && cbs.length) {
+        cbs.forEach((cb) => {
+          this.video.removeEventListener(name, cb);
+        });
+      }
+    });
+    this.events = {};
+    return this;
+  }
+
+  onplaying() {
+    if (this.firstPlaying) {
+      this.firstPlaying = false;
+      if (!this.useFrameCallback) {
+        this.drawFrame(null, null);
+      }
+    }
+  }
+
+  oncanplay() {
+    const begin = (this.options as any).beginPoint;
+    if (begin && this.setBegin) {
+      this.setBegin = false;
+      this.video.currentTime = begin;
+    }
+  }
+
+  onerror(err) {
+    console.error('[Alpha video]: play error: ', err);
+    this.destroy();
+    this.options.onLoadError && this.options.onLoadError(err);
+  }
+}
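
requestAnimFunc throttles requestAnimationFrame down to the configured fps by skipping callbacks: with fps = 20 on a 60 Hz display, 60 / fps = 3, so the callback only fires when index % 3 === 0 and the other frames simply re-queue. A standalone sketch of the same idea, assuming a 60 Hz refresh rate like the code above does:

function makeThrottledRaf(fps: number) {
  let index = -1;
  const tick = (cb: () => void): number => {
    index++;
    return requestAnimationFrame(() => {
      if (index % Math.round(60 / fps) === 0) {
        cb();       // render this frame
      } else {
        tick(cb);   // skip this vsync and wait for the next one
      }
    });
  };
  return tick;
}

// makeThrottledRaf(20)(draw) invokes draw roughly 20 times per second on a 60 Hz display.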

+ 374 - 0
projects/live-app/src/lib/vap-player/webgl-render-vap.ts

@@ -0,0 +1,374 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { VapConfig } from './type';
+import VapFrameParser from './vap-frame-parser';
+import * as glUtil from './gl-util';
+import VapVideo from './video';
+
+const PER_SIZE = 9;
+
+function computeCoord(x: number, y: number, w: number, h: number, vw: number, vh: number) {
+  // leftX rightX bottomY topY
+  return [x / vw, (x + w) / vw, (vh - y - h) / vh, (vh - y) / vh];
+}
+
+export default class WebglRenderVap extends VapVideo {
+  private canvas: HTMLCanvasElement;
+  private gl: WebGLRenderingContext;
+  private vertexShader: WebGLShader;
+  private fragmentShader: WebGLShader;
+  private program: WebGLProgram;
+  private textures: WebGLTexture[] = [];
+  private videoTexture: WebGLTexture;
+  private vertexBuffer: WebGLBuffer;
+  private vapFrameParser: VapFrameParser;
+  private imagePosLoc: WebGLUniformLocation;
+
+  constructor(options?: VapConfig) {
+    super();
+    if (options) {
+      this.play(options);
+    }
+  }
+
+  override play(options?: VapConfig) {
+    if (options) {
+      this.setOptions(options);
+    }
+    if (!this.options?.config) {
+      console.error(`options.config cannot be empty.`);
+      return this;
+    }
+    if (options) {
+      this.initVideo();
+      // re-parse the config
+      this.vapFrameParser = new VapFrameParser(this.options.config, this.options);
+      this.vapFrameParser
+        .init()
+        .then(() => {
+          this.initWebGL();
+          this.initTexture();
+          this.initVideoTexture();
+          this.options.fps = this.vapFrameParser.config.info.fps || 30;
+          super.play();
+        })
+        .catch((e) => {
+          this.vapFrameParser = null;
+          console.error('[Alpha video] parse vap frame error.', e);
+          return this;
+        });
+    } else {
+      super.play();
+    }
+    return this;
+  }
+
+  initWebGL() {
+    let { canvas, gl, vertexShader, fragmentShader, program } = this;
+    const { width, height } = this.options;
+    if (!canvas) {
+      canvas = document.createElement('canvas');
+    }
+    const { vapFrameParser } = this;
+    const { w, h } = vapFrameParser.config.info;
+    canvas.width = width || w;
+    canvas.height = height || h;
+    this.container.appendChild(canvas);
+
+    if (!gl) {
+      gl = canvas.getContext('webgl') || (canvas.getContext('experimental-webgl') as WebGLRenderingContext);
+      gl.disable(gl.BLEND);
+      gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+    }
+    gl.viewport(0, 0, canvas.width, canvas.height);
+
+    if (!vertexShader) {
+      vertexShader = this.initVertexShader(gl);
+    }
+
+    if (fragmentShader && program) {
+      glUtil.cleanWebGL(gl, { program, shaders: [fragmentShader] });
+    }
+
+    const { srcData } = vapFrameParser;
+    fragmentShader = this.initFragmentShader(gl, Object.keys(srcData).length);
+    program = glUtil.createProgram(gl, vertexShader, fragmentShader);
+
+    this.canvas = canvas;
+    this.gl = gl;
+    this.vertexShader = vertexShader;
+    this.fragmentShader = fragmentShader;
+    this.program = program;
+    this.imagePosLoc = null;
+    return gl;
+  }
+
+  /**
+   * Vertex shader
+   */
+  initVertexShader(gl: WebGLRenderingContext) {
+    return glUtil.createShader(
+      gl,
+      gl.VERTEX_SHADER,
+      `attribute vec2 a_position; // vertex position
+             attribute vec2 a_texCoord; // rgb texture coordinate
+             attribute vec2 a_alpha_texCoord; // alpha texture coordinate
+             varying vec2 v_alpha_texCoord; // alpha texture coordinate passed to the fragment shader
+             varying   vec2 v_texcoord; // rgb texture coordinate passed to the fragment shader
+             void main(void){
+                gl_Position = vec4(a_position, 0.0, 1.0); // set the vertex position
+                v_texcoord = a_texCoord; // forward the rgb texture coordinate
+                v_alpha_texCoord = a_alpha_texCoord; // forward the alpha texture coordinate
+             }`
+    );
+  }
+
+  /**
+   * Fragment shader
+   */
+  initFragmentShader(gl: WebGLRenderingContext, textureSize) {
+    const bgColor = `vec4(texture2D(u_image_video, v_texcoord).rgb, texture2D(u_image_video,v_alpha_texCoord).r);`;
+    let sourceTexure = '';
+    let sourceUniform = '';
+
+    if (textureSize > 0) {
+      const bufferSize = textureSize * PER_SIZE;
+      const imgColor = [];
+      const samplers = [];
+      for (let i = 0; i < textureSize; i++) {
+        imgColor.push(
+          `if(ndx == ${i + 1}){
+                color = texture2D(u_image${i + 1},uv);
+            }`
+        );
+        samplers.push(`uniform sampler2D u_image${i + 1};`);
+      }
+
+      sourceUniform = `
+            ${samplers.join('\n')}
+            uniform float image_pos[${bufferSize}];
+            vec4 getSampleFromArray(int ndx, vec2 uv) {
+                vec4 color;
+                ${imgColor.join(' else ')}
+                return color;
+            }
+            `;
+      sourceTexure = `
+            vec4 srcColor,maskColor;
+            vec2 srcTexcoord,maskTexcoord;
+            int srcIndex;
+            float x1,x2,y1,y2,mx1,mx2,my1,my2; // display regions
+
+            for(int i=0;i<${bufferSize};i+= ${PER_SIZE}){
+                if ((int(image_pos[i]) > 0)) {
+                  srcIndex = int(image_pos[i]);
+    
+                    x1 = image_pos[i+1];
+                    x2 = image_pos[i+2];
+                    y1 = image_pos[i+3];
+                    y2 = image_pos[i+4];
+                    
+                    mx1 = image_pos[i+5];
+                    mx2 = image_pos[i+6];
+                    my1 = image_pos[i+7];
+                    my2 = image_pos[i+8];
+    
+    
+                    if (v_texcoord.s>x1 && v_texcoord.s<x2 && v_texcoord.t>y1 && v_texcoord.t<y2) {
+                        srcTexcoord = vec2((v_texcoord.s-x1)/(x2-x1),(v_texcoord.t-y1)/(y2-y1));
+                         maskTexcoord = vec2(mx1+srcTexcoord.s*(mx2-mx1),my1+srcTexcoord.t*(my2-my1));
+                         srcColor = getSampleFromArray(srcIndex,srcTexcoord);
+                         maskColor = texture2D(u_image_video, maskTexcoord);
+                         srcColor.a = srcColor.a*(maskColor.r);
+                      
+                         bgColor = vec4(srcColor.rgb*srcColor.a,srcColor.a) + (1.0-srcColor.a)*bgColor;
+                      
+                    }   
+                }
+            }
+            `;
+    }
+
+    const fragmentShader = `
+        precision lowp float;
+        varying vec2 v_texcoord;
+        varying vec2 v_alpha_texCoord;
+        uniform sampler2D u_image_video;
+        ${sourceUniform}
+        
+        void main(void) {
+            vec4 bgColor = ${bgColor}
+            ${sourceTexure}
+            gl_FragColor = bgColor;
+        }
+        `;
+    return glUtil.createShader(gl, gl.FRAGMENT_SHADER, fragmentShader);
+  }
+
+  initTexture() {
+    const { gl, vapFrameParser, textures } = this;
+    if (!vapFrameParser || !vapFrameParser.srcData) {
+      return;
+    }
+
+    const resources = vapFrameParser.srcData;
+    // texture unit 0 is reserved for the video
+    let i = 1;
+    for (const key in resources) {
+      const resource = resources[key];
+      const texture = textures[i - 1];
+      if (texture) {
+        // reuse the existing texture
+        gl.activeTexture(gl.TEXTURE0 + i);
+        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, resource.img);
+      } else {
+        this.textures.push(glUtil.createTexture(gl, i, resource.img));
+      }
+      const sampler = gl.getUniformLocation(this.program, `u_image${i}`);
+      gl.uniform1i(sampler, i);
+      this.vapFrameParser.textureMap[resource.srcId] = i++;
+    }
+  }
+
+  initVideoTexture() {
+    const { gl, vapFrameParser, program } = this;
+    if (!vapFrameParser || !vapFrameParser.config || !vapFrameParser.config.info) {
+      return;
+    }
+
+    // video texture
+    if (!this.videoTexture) {
+      this.videoTexture = glUtil.createTexture(gl, 0);
+    }
+
+    const sampler = gl.getUniformLocation(program, `u_image_video`);
+    gl.uniform1i(sampler, 0);
+    gl.activeTexture(gl.TEXTURE0);
+
+    const info = vapFrameParser.config.info;
+    const { videoW: vW, videoH: vH } = info;
+    const [rgbX, rgbY, rgbW, rgbH] = info.rgbFrame;
+    const [aX, aY, aW, aH] = info.aFrame;
+    const rgbCoord = computeCoord(rgbX, rgbY, rgbW, rgbH, vW, vH);
+    const aCoord = computeCoord(aX, aY, aW, aH, vW, vH);
+    const view = new Float32Array([
+      ...[-1, 1, rgbCoord[0], rgbCoord[3], aCoord[0], aCoord[3]],
+      ...[1, 1, rgbCoord[1], rgbCoord[3], aCoord[1], aCoord[3]],
+      ...[-1, -1, rgbCoord[0], rgbCoord[2], aCoord[0], aCoord[2]],
+      ...[1, -1, rgbCoord[1], rgbCoord[2], aCoord[1], aCoord[2]],
+    ]);
+
+    if (!this.vertexBuffer) {
+      this.vertexBuffer = gl.createBuffer();
+      gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
+    }
+    gl.bufferData(gl.ARRAY_BUFFER, view, gl.STATIC_DRAW);
+
+    // bind the buffer to the a_position and a_texCoord attributes
+    const size = view.BYTES_PER_ELEMENT;
+    const aPosition = gl.getAttribLocation(program, 'a_position');
+    gl.enableVertexAttribArray(aPosition);
+    gl.vertexAttribPointer(aPosition, 2, gl.FLOAT, false, size * 6, 0); // vertex position
+
+    const aTexCoord = gl.getAttribLocation(program, 'a_texCoord');
+    gl.enableVertexAttribArray(aTexCoord);
+    gl.vertexAttribPointer(aTexCoord, 2, gl.FLOAT, false, size * 6, size * 2); // rgb texel coordinates
+
+    const aAlphaTexCoord = gl.getAttribLocation(program, 'a_alpha_texCoord');
+    gl.enableVertexAttribArray(aAlphaTexCoord);
+    gl.vertexAttribPointer(aAlphaTexCoord, 2, gl.FLOAT, false, size * 6, size * 4); // alpha texel coordinates
+  }
+
+  override drawFrame(_, info) {
+    const { gl, vapFrameParser, video, options } = this;
+    if (!gl) {
+      super.drawFrame(_, info);
+      return;
+    }
+
+    const frame =
+      !options.loop && info?.presentedFrames > 0
+        ? info.presentedFrames - 1
+        : Math.round(video.currentTime * options.fps) + (options as any).offset;
+    // console.info('frame:', info.presentedFrames - 1, Math.round(this.video.currentTime * this.options.fps));
+    const frameData = vapFrameParser.getFrame(frame);
+
+    if (frameData?.obj) {
+      let posArr = [];
+      const { videoW: vW, videoH: vH, rgbFrame } = vapFrameParser.config.info;
+      frameData.obj.forEach((frame) => {
+        // the caller may not have supplied this src
+        const imgIndex = vapFrameParser.textureMap[frame.srcId];
+        if (imgIndex > 0) {
+          posArr[posArr.length] = imgIndex;
+          // frame coords are final display coords; the GLSL here computes in video coordinates
+          const [rgbX, rgbY] = rgbFrame;
+          const [x, y, w, h] = frame.frame;
+          const [mX, mY, mW, mH] = frame.mFrame;
+          const coord = computeCoord(x + rgbX, y + rgbY, w, h, vW, vH);
+          const mCoord = computeCoord(mX, mY, mW, mH, vW, vH);
+          posArr = posArr.concat(coord).concat(mCoord);
+        }
+      });
+      if (posArr.length) {
+        this.imagePosLoc = this.imagePosLoc || gl.getUniformLocation(this.program, 'image_pos');
+        gl.uniform1fv(this.imagePosLoc, new Float32Array(posArr));
+      }
+    }
+
+    this.trigger('frame', frame + 1, frameData, vapFrameParser.config);
+    gl.clear(gl.COLOR_BUFFER_BIT);
+    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video); // upload the current video frame as the 2D texture
+    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+    super.drawFrame(_, info);
+  }
+
+  // reset state to prepare for the next playback
+  override clear() {
+    super.clear();
+    const { gl } = this;
+    // clear the canvas so that, on consecutive plays, the first frame is not the previous mp4's last frame
+    gl.clear(gl.COLOR_BUFFER_BIT);
+  }
+
+  // destroy and release WebGL resources; calling play() after destroy re-initializes them
+  override destroy() {
+    super.destroy();
+    const { canvas, gl, vertexShader, fragmentShader, program, textures, videoTexture, vertexBuffer } = this;
+    if (canvas) {
+      canvas.parentNode && canvas.parentNode.removeChild(canvas);
+      this.canvas = null;
+    }
+    if (gl) {
+      glUtil.cleanWebGL(gl, {
+        program,
+        shaders: [vertexShader, fragmentShader],
+        textures: [...textures, videoTexture],
+        buffers: [vertexBuffer],
+      });
+    }
+
+    this.gl = null;
+    this.vertexShader = null;
+    this.fragmentShader = null;
+    this.program = null;
+    this.imagePosLoc = null;
+    this.vertexBuffer = null;
+    this.videoTexture = null;
+    this.textures = [];
+  }
+}
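
drawFrame also fires a custom 'frame' event on every rendered frame ('frame' is in the customEvent list in video.ts, so the callback is stored instead of being attached to the <video> element). A sketch of using it to observe playback progress, given a player created the same way as in the components above:

import { VapInit } from './index';
import { VapConfig } from './type';

declare const options: VapConfig; // a config like the ones built in the components above

const player = VapInit(options);
player.on('frame', (frameIndex: number, frameData: any) => {
  // frameData is the config.frame entry whose i matches; undefined when the frame has no fusion data
  console.log('[vap] frame', frameIndex, frameData?.obj?.length ?? 0, 'fused sources');
});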

+ 7 - 5
projects/live-app/src/services/message.service.ts

@@ -7,7 +7,8 @@ import { Subject } from 'rxjs';
 import { Router } from '@angular/router';
 import { AiChatService } from './aichart.service';
 declare const AgoraRTM: any;
-declare const Vap: any;
+import {VapInit} from '../lib/vap-player/index'
+
 
 @Injectable({
   providedIn: 'root',
@@ -281,7 +282,7 @@ export class MessageService {
     this.isPlayer = true
     let dom = document.getElementById('vap-gift');
     console.log(dom);
-    let vapPlayer = new Vap.default({
+    let vapPlayer = VapInit({
      container: dom, // rendering container (DOM element)
      src: giftModule.gift?.video, // URL of the VAP animation video
      config: giftModule.gift?.config, // JSON config required for VAP playback (required)
@@ -289,7 +290,7 @@
      height: window.innerHeight, // container height
      fps: 30, // frame rate; the JSON config carries the video's fps, check it there
      mute: false, // mute audio
-      type: 2, // the component caches instances by the type field; distinct VAP instances should use distinct type values (e.g. 0, 1, 2)
+      type: 1, // the component caches instances by the type field; distinct VAP instances should use distinct type values (e.g. 0, 1, 2)
      loop: false, // loop playback
      precache: true, // download the whole video before playing; small animations can stream, but large ones should precache, otherwise a big file or a poor network causes stuttering
      beginPoint: 0, // start time in seconds; may have no effect in some browsers
@@ -381,8 +382,9 @@ export class MessageService {
     });
   }
  /* Subscribe to messages */
-  subscribeMessage(channelName: string, param?: any, deadline?: number) {
-    if (this.channelNameList[channelName]) return;
+  subscribeMessage(channelName: string, param?: any, deadline?: number):any {
+    if (this.channelNameList[channelName]) {return};
+
     return new Promise((resolve, reject) => {
       const options = {
        withMessage: param?.message ?? false, // message events
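
With the explicit :any return type, subscribeMessage now visibly yields either a Promise (new subscription) or undefined (the channel was already in channelNameList), so callers can await the result defensively. A sketch, inside an async method of this service, with an illustrative channel name:

const result: any = await this.subscribeMessage('live-room-demo', { message: true });
if (result) {
  // newly subscribed; undefined means the channel was already registered
}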

+ 3 - 0
tsconfig.json

@@ -5,6 +5,9 @@
   "compilerOptions": {
     "outDir": "./dist/out-tsc",
     "strict": true,
+    "strictPropertyInitialization": false,
+    "strictNullChecks": false,
+    "noImplicitAny": false,
     "noImplicitOverride": true,
     "noPropertyAccessFromIndexSignature": true,
     "noImplicitReturns": true,