<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Bahraman AR First Video</title>
  <!-- include three.js library -->
  <script src='js/three.js'></script>
  <!-- include jsartookit -->
  <script src='jsartoolkit5/artoolkit.min.js'></script>
  <script src='jsartoolkit5/artoolkit.api.js'></script>
  <!-- include threex.artoolkit -->
  <script src='threex/threex-artoolkitsource.js'></script>
  <script src='threex/threex-artoolkitcontext.js'></script>
  <script src='threex/threex-arbasecontrols.js'></script>
  <script src='threex/threex-armarkercontrols.js'></script>
</head>
<body style='margin : 0px; overflow: hidden; font-family: Monospace;'>
<!-- No autoplay: the script starts/stops playback when the AR marker is (in)visible.
     (The original markup had both `autoplay` and the invalid `autoplay='false'`;
     boolean attributes are true by presence, so autoplay was accidentally on.) -->
<video id="video" loop muted playsinline webkit-playsinline crossorigin="anonymous" style="display:none">
  <source src='video/bahraman.ogv' type='video/ogg; codecs="theora, vorbis"'>
  <source src='video/bahraman.mp4' type='video/mp4; codecs="avc1.42E01E, mp4a.40.2"'>
</video>
<!-- 
  Example created by Lee Stemkoski: https://github.com/stemkoski
  Based on the AR.js library and examples created by Jerome Etienne: https://github.com/jeromeetienne/AR.js/
-->
<script>

// --- Application-wide state ----------------------------------------------
var scene, camera, renderer;            // three.js core objects
var clock, deltaTime, totalTime;        // frame timing
var arToolkitSource, arToolkitContext;  // AR.js webcam source + marker tracker
var markerRoot1;                        // group whose pose tracks the marker
var mesh1;                              // textured plane that shows the video
var videoPlaying = false;               // whether the <video> is currently playing

// Boot: build the scene and AR pipeline, then enter the render loop.
initialize();
animate();

/**
 * Build the three.js scene, the WebGL renderer, the AR.js webcam source and
 * marker tracker, and the marker-anchored video plane.
 * Populates the module-level globals consumed by update() and render().
 */
function initialize()
{
  scene = new THREE.Scene();

  let ambientLight = new THREE.AmbientLight( 0xcccccc, 0.5 );
  scene.add( ambientLight );

  // The projection matrix is copied in from ARToolkit once it initializes,
  // so a bare THREE.Camera (no perspective parameters) is sufficient here.
  camera = new THREE.Camera();
  scene.add(camera);

  renderer = new THREE.WebGLRenderer({
    antialias : true,
    alpha: true  // transparent canvas so the webcam feed shows behind the scene
  });
  renderer.setClearColor(new THREE.Color('lightgrey'), 0);
  renderer.setSize( 640, 480 );
  renderer.domElement.style.position = 'absolute';
  renderer.domElement.style.top = '0px';
  renderer.domElement.style.left = '0px';
  document.body.appendChild( renderer.domElement );

  clock = new THREE.Clock();
  deltaTime = 0;
  totalTime = 0;

  //////////////////////////////////////////////////////////////
  // setup arToolkitSource (webcam feed)
  //////////////////////////////////////////////////////////////

  arToolkitSource = new THREEx.ArToolkitSource({
    sourceType : 'webcam',
  });

  // Keep the webcam element, renderer canvas and AR controller canvas in
  // sync with the window geometry.
  function onResize()
  {
    arToolkitSource.onResize();
    arToolkitSource.copySizeTo(renderer.domElement);
    if ( arToolkitContext.arController !== null )
    {
      arToolkitSource.copySizeTo(arToolkitContext.arController.canvas);
    }
  }

  arToolkitSource.init(function onReady(){
    onResize();
  });

  // handle resize event
  window.addEventListener('resize', function(){
    onResize();
  });

  //////////////////////////////////////////////////////////////
  // setup arToolkitContext (marker detection)
  //////////////////////////////////////////////////////////////

  arToolkitContext = new THREEx.ArToolkitContext({
    cameraParametersUrl: 'data/camera_para.dat',
    detectionMode: 'mono'
  });

  // Copy the ARToolkit projection matrix to the camera once initialization
  // completes.
  arToolkitContext.init( function onCompleted(){
    camera.projectionMatrix.copy( arToolkitContext.getProjectionMatrix() );
  });

  //////////////////////////////////////////////////////////////
  // setup markerRoots
  //////////////////////////////////////////////////////////////

  // build markerControls
  markerRoot1 = new THREE.Group();
  scene.add(markerRoot1);
  let markerControls1 = new THREEx.ArMarkerControls(arToolkitContext, markerRoot1, {
    type: 'pattern', patternUrl: 'data/pat.patt',
  });

  let geometry1 = new THREE.PlaneBufferGeometry(2,2, 4,4);

  // Assign the element to a shared global (no `let`) instead of shadowing it
  // with a function-local, so update() reaches the same element explicitly
  // rather than via the implicit window.video named-element leak.
  video = document.getElementById( 'video' );
  let texture = new THREE.VideoTexture( video );
  texture.minFilter = THREE.LinearFilter;
  texture.magFilter = THREE.LinearFilter;
  texture.format = THREE.RGBFormat;
  let material1 = new THREE.MeshBasicMaterial( { map: texture } );

  mesh1 = new THREE.Mesh( geometry1, material1 );
  mesh1.rotation.x = -Math.PI/2;  // lay the plane flat on the marker

  markerRoot1.add( mesh1 );

  // Unmute on first detection and let update() drive play/pause.
  // BUG FIX: the listener previously also set videoPlaying = true WITHOUT
  // calling video.play(), which made update() believe playback had already
  // started and therefore never start the video at all.
  markerControls1.addEventListener('markerFound', function() {
    video.muted = false;
  });
}


/**
 * Per-frame AR logic: feed the current webcam frame to ARToolkit, then
 * play the video while the marker is visible and pause it otherwise.
 */
function update()
{
  // update artoolkit on every frame
  if ( arToolkitSource.ready !== false )
  {
    arToolkitContext.update( arToolkitSource.domElement );

    // BUG FIX: `video` was never declared in this scope — the original only
    // worked via the implicit window.video global created by the element's
    // id attribute. Resolve the element explicitly and bail out if missing.
    var video = document.getElementById('video');
    if ( video === null ) return;

    if (markerRoot1.visible) {
      // Use the element's own paused state (not only the videoPlaying flag)
      // so playback starts even if the flag was set elsewhere without an
      // actual play() call.
      if (video.paused) {
        // play() returns a promise in modern browsers; log a rejection
        // (e.g. autoplay policy) instead of leaving it unhandled.
        var playPromise = video.play();
        if (playPromise !== undefined && typeof playPromise.catch === 'function') {
          playPromise.catch(function(err) {
            console.warn('video.play() was blocked:', err);
          });
        }
        videoPlaying = true;
      }
    } else {
      // Marker lost: pause playback and keep the flag in sync.
      if (!video.paused) {
        video.pause();
        videoPlaying = false;
      }
    }
  }
}


// Draw a single frame of the scene from the AR-driven camera's viewpoint.
function render()
{
  renderer.render( scene, camera );
}


/**
 * Main loop: advance the clock, run AR tracking / video logic, draw the
 * frame, then re-schedule itself via requestAnimationFrame.
 */
function animate()
{
  deltaTime = clock.getDelta();
  totalTime = totalTime + deltaTime;
  update();
  render();
  requestAnimationFrame( animate );
}

</script>
</body>
</html>
<!--
  AR.js: Play/Pause Video Based on Marker Detection
  原文地址: https://www.cveoy.top/t/topic/PXv 著作权归作者所有。请勿转载和采集!
  免费AI点我,无需注册和登录
-->