Examples of Audio


Examples of com.google.gwt.media.client.Audio

        return clone;
    }

    @Override
    public Widget cloneDisplay(Map<String, Object> formData) {
        Audio au = Audio.createIfSupported();
        if (au == null) {
            return new Label(notSupported.getText());
        }
        populate(au);
        Object input = getInputValue(formData);
        if (input != null) {
            String url = input.toString();
            au.setSrc(url);
            if (url.endsWith(".mp3")) {
                au.getElement().setPropertyString("type", "application/mp3");
            } else if (url.endsWith(".ogg")) {
                au.getElement().setPropertyString("type", "application/ogg");
            } else if (url.endsWith(".mid")) {
                au.getElement().setPropertyString("type", "application/midi");
            }
        }
        super.populateActions(au.getElement());
        return au;
    }
View Full Code Here
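
A minimal, hedged sketch of the same createIfSupported pattern on its own (the URL, the fallback text, and the panel are placeholders; imports are omitted as in the snippet above):

    // Create the HTML5 <audio> widget only where the browser supports it,
    // otherwise fall back to a plain Label.
    Audio audio = Audio.createIfSupported();
    if (audio == null) {
        RootPanel.get().add(new Label("HTML5 audio is not supported"));
    } else {
        audio.setSrc("clip.mp3");  // placeholder URL
        audio.setControls(true);   // show the browser's native controls
        RootPanel.get().add(audio);
    }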

Examples of com.vaadin.ui.Audio

                new ClassResource(getClass(), "bip.ogg") };
        final Resource[] s2 = {
                new ClassResource(getClass(), "toyphone_dialling.mp3"),
                new ClassResource(getClass(), "toyphone_dialling.ogg") };

        final Audio audio = new Audio();

        audio.setSources(s1);
        audio.setShowControls(true);
        audio.setHtmlContentAllowed(true);
        audio.setAltText("Can't <b>play</b> media");
        audio.setAutoplay(true);

        addComponent(audio);

        CheckBox checkBox = new CheckBox("Show controls",
                new MethodProperty<Boolean>(audio, "showControls"));
        addComponent(checkBox);
        checkBox = new CheckBox("HtmlContentAllowed",
                new MethodProperty<Boolean>(audio, "htmlContentAllowed"));
        addComponent(checkBox);
        checkBox = new CheckBox("muted", new MethodProperty<Boolean>(audio,
                "muted"));
        addComponent(checkBox);
        checkBox = new CheckBox("autoplay", new MethodProperty<Boolean>(audio,
                "autoplay"));
        addComponent(checkBox);

        Button b = new Button("Change", new Button.ClickListener() {

            @Override
            public void buttonClick(ClickEvent event) {
                audio.setSources(s2);
            }
        });
        addComponent(b);
        getLayout().setHeight("400px");
        getLayout().setExpandRatio(b, 1.0f);
View Full Code Here

Examples of com.vaadin.ui.Audio

                v.pause();
            }

        }));

        final Audio a = new Audio("audio");
        a.setSources(
                new ExternalResource(
                        "http://jonatan.virtuallypreinstalled.com/media/audio.mp3"),
                new ExternalResource(
                        "http://jonatan.virtuallypreinstalled.com/media/audio.ogg"));
        addComponent(a);

        addComponent(new Button("Play audio", new ClickListener() {

            @Override
            public void buttonClick(ClickEvent event) {
                a.play();
            }

        }));
        addComponent(new Button("Pause audio", new ClickListener() {

            @Override
            public void buttonClick(ClickEvent event) {
                a.pause();
            }

        }));
    }
View Full Code Here

Examples of com.volantis.map.sti.model.Audio

     *
     * @return audio model object.
     */
    private Audio getAudio() throws ConverterException {
        Audio audio = new Audio();

        // Set standard media parameters
        setMediaParameters(audio);

        // Set the audio size limit.
        String limitParameter = getParameterValue(ParameterNames.MAX_AUDIO_SIZE);

        if (limitParameter != null) {
            audio.setSizeLimit(Long.parseLong(limitParameter));
        }

        // Set audio specific parameters.
        audio.setCodec(getParameterValue(ParameterNames.AUDIO_CODEC));

        return audio;
    }
View Full Code Here

Examples of es.unizar.cps.tecnodiscap.util.Audio

   */
  private void initialize() {
    File file = new File(this.localizacionXML);
    procesadorXML = new ProcesadorXML(file, deviceManager);
    procesadorDatos = new ProcesadorDatos(this);
    audio = new Audio();
    initializeUser();
    this.setSize(Toolkit.getDefaultToolkit().getScreenSize());
    this.setExtendedState(this.getExtendedState() | JFrame.MAXIMIZED_BOTH);
    this.setJMenuBar(getMenuBarPrincipal());
    this.setContentPane(getContenedor());
View Full Code Here

Examples of gwt.g2d.client.media.Audio

   
    add(new HTML("Music obtained from: <a href=\""
        + "http://www.vorbis.com/music/Epoq-Lepidoptera.ogg\">"
        + "http://www.vorbis.com/music/Epoq-Lepidoptera.ogg</a>"));
   
    Audio audio = new Audio();
    Source source = new Source("http://www.vorbis.com/music/Epoq-Lepidoptera.ogg");
    source.setType("audio/ogg");
    audio.addSource(source);
    audio.setControls(true);
    audio.setAutobuffer(true);
    add(audio);
  }
View Full Code Here

Examples of letweb.semanticum.tsd.Audio

      ArrayList<Long> beginTimeAnnot = new ArrayList<Long>();
      ArrayList<Long> endTimeAnnot = new ArrayList<Long>();
     
      StringBuffer document = new StringBuffer();
      Iterator audioIt = aJCas.getAnnotationIndex(Audio.type).iterator();
      Audio audio = (Audio) audioIt.next();
      Result result;
     
  //    getContext().getLogger().log("CF File is " + ConfigFile);
     
  //    if(audioIt.hasNext())
     
      ConcatAudioFileDataSource data = (ConcatAudioFileDataSource) cm.lookup("dataSource");
     
      for(String i: audio.getAudioPath().toArray())
        batch.add(new File(i));
     
      data.setBatchFiles(batch);
     
      recognizer.allocate();
     
      getContext().getLogger().log(Level.INFO, "Start recognition of " + audio.getAudioPath().toString(0));
      Long comp1 = System.currentTimeMillis();
     
      while((result = recognizer.recognize()) != null){
       
        ArrayList<TextAndTime> stt = TextAndTime.convert(result);
       
        if(stt != null)
          for(TextAndTime t: stt){
           
            String word = t.getWord();
           
            if(word.matches(TextAndTime.PATTERN_SIL))
              document.append(word);
            else{           
              beginAnnot.add(document.length());
              document.append(word);
              endAnnot.add(document.length());
             
              beginTimeAnnot.add(t.getIniTime());
              endTimeAnnot.add(t.getEndTime());
            }
            document.append(" ");
          }
       
      }
     
          Long comp2 = System.currentTimeMillis();
          getContext().getLogger().log(Level.INFO, "End recognition of " + audio.getAudioPath().toString(0));
          getContext().getLogger().log(Level.INFO, TextAndTime.getFormattedLong(comp2 - comp1)
              + " is total recognition time of this audio files " + audio.getAudioPath().toString(0));
          getContext().getLogger().log(Level.INFO, "Text " + document.toString());
         
          aJCas.setDocumentText(document.toString());
         
          Iterator<Long> itb = beginTimeAnnot.iterator(),
View Full Code Here
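
The recognize loop above follows the usual Sphinx-4 pattern; a hedged sketch of that pattern in isolation (the configuration resource and the "recognizer" component name are assumptions, not taken from the snippet):

    // Assumed Sphinx-4 flow: look up the recognizer from an XML configuration,
    // allocate it once, then pull results until the data source is exhausted.
    ConfigurationManager cm = new ConfigurationManager(
            getClass().getResource("config.xml")); // hypothetical configuration
    Recognizer recognizer = (Recognizer) cm.lookup("recognizer");
    recognizer.allocate();
    Result result;
    while ((result = recognizer.recognize()) != null) {
        System.out.println(result.getBestResultNoFiller());
    }
    recognizer.deallocate();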

Examples of letweb.semanticum.tsd.Audio

  }

  @Override
  public void process(JCas aJCas) throws AnalysisEngineProcessException {
   
    Audio audio = new Audio(aJCas);
   
    // Retrieve the array of links from the aJCas
    Iterator linkIT = aJCas.getAnnotationIndex(LinkType.type).iterator();
    LinkType link = (LinkType) linkIT.next();
    // LinkType uri=null;
    // StringArray tempPath=null;
    // int i=0;
    // FSIndex linkT = aJCas.getAnnotationIndex(LinkType.type);
   
    // Assign the array of links to the local variable
    StringArray videoUri = link.getArrayVideoUri();
   
    //Log the video URIs
    int i;
    for(i = 0; i < videoUri.size(); i++)
      getContext().getLogger().log(Level.WARNING, videoUri.get(i));
   
    // Loop to download the individual videos
    // Thread download = null;
    // for(i=0; i<videoUri.size(); i++){
    // //Instantiate the threads
    // download = new Thread( new Runnable() {
    // public void run() {
    // try {
    // //Assign to the i-th element the path where the audio
    // file was downloaded
    // tempPath.set(i,
    // AudioVideoProcessing.audioDownloadEndExtraction(uri.getSourceType(),
    // videoUri.get(i)));
    // } catch (IOException e) {
    // getContext().getLogger().log(Level.WARNING, "Error: " + e.getMessage());
    // } catch (InterruptedException e) {
    // getContext().getLogger().log(Level.WARNING, "Error: " + e.getMessage());
    // }
    // }
    // });
    // //Start each individual instance
    // download.start();
    // }
    // //Wait for the threads to finish
    // try {
    // download.join();
    // } catch (InterruptedException e) {
    // getContext().getLogger().log(Level.WARNING, "Error: " + e.getMessage());
    // }
   
    //Initialize the set of threads
    ExecutorService es = Executors.newCachedThreadPool();
    ArrayList<Callable<String>> tasks = new ArrayList<Callable<String>>();
    List<Future<String>> results = new ArrayList<Future<String>>();
   
    StringArray path = new StringArray(aJCas, videoUri.size());
    // Assign the relevant array
    audio.setAudioPath(path);
   
    for (i = 0; i < videoUri.size(); i++)
      tasks.add(new ThreadCallDownload(videoUri.get(i),link.getSourceType(),tp));
   
    try {
      //Method that runs all the threads and returns the results once
      //all the threads have finished
      results = es.invokeAll(tasks);
      for(i = 0; i < results.size(); i++){
        audio.setAudioPath(i, results.get(i).get());
        getContext().getLogger().log(Level.WARNING, "Path: " + results.get(i).get());
      }
     
    } catch (InterruptedException e) {
      getContext().getLogger().log(Level.WARNING, "InterruptedError: " + e.getMessage());
    } catch (ExecutionException e) {
      getContext().getLogger().log(Level.WARNING, "IOError/InterruptedError: " + e.getMessage());
    }
   
    // Update the indexes
    audio.addToIndexes();
  }
View Full Code Here
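
The download fan-out above relies on ExecutorService.invokeAll; a self-contained, hedged sketch of that pattern with plain Callables (task bodies and result strings are placeholders):

    // Submit every Callable at once, then read each Future in submission order.
    ExecutorService es = Executors.newCachedThreadPool();
    List<Callable<String>> tasks = new ArrayList<Callable<String>>();
    for (int n = 0; n < 3; n++) {
        final int id = n;
        tasks.add(new Callable<String>() {
            public String call() {
                return "/tmp/audio-" + id + ".wav"; // placeholder download result
            }
        });
    }
    try {
        List<Future<String>> results = es.invokeAll(tasks); // blocks until all tasks finish
        for (Future<String> f : results) {
            System.out.println(f.get()); // get() rethrows task failures as ExecutionException
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    } catch (ExecutionException e) {
        e.printStackTrace();
    } finally {
        es.shutdown();
    }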

Examples of org.internna.iwebmvc.model.core.Audio

    }

    @Override
    @SuppressWarnings("unchecked")
    public void setAsText(String text) throws IllegalArgumentException {
        Audio doc = (Audio) getValue();
        Properties properties = new Properties();
        try {
            properties.load(IOUtils.toInputStream(text));
            String uri = properties.getProperty("uri");
            Assert.isEncrypted(decipherer, uri);
            if (doc == null) doc = new Audio();
            doc.setUri(decipherer.decrypt(uri));
            doc.setMimeType(properties.getProperty("mimeType"));
            doc.setIdentifier(properties.getProperty("identifier"));
            doc.setCreated(dateFormat.parse(properties.getProperty("created")));
            doc.setSizeInBytes(Long.parseLong(properties.getProperty("sizeInBytes")));
        } catch (Exception ex) {
            if (log.isDebugEnabled()) log.debug("Could not completely bind [" + text + "] as an Audio: " + ex.getMessage());
        }
        setValue(doc);
    }
View Full Code Here
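
setAsText above parses a java.util.Properties-formatted string; a hedged sketch of what a caller might feed it (the key names come from the snippet, while the values, the "editor" variable, and the date format are assumptions):

    // Hypothetical input: "uri" must be a value the configured decipherer
    // accepts as encrypted, and "created" must match the editor's dateFormat.
    String text = "uri=ENCRYPTED_URI_VALUE\n"
            + "mimeType=audio/mpeg\n"
            + "identifier=sample-clip\n"
            + "created=01/01/2010\n"
            + "sizeInBytes=1024\n";
    editor.setAsText(text);              // editor: the Audio property editor shown above
    Audio bound = (Audio) editor.getValue();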