What is a keyframe?
Keyframes are still images extracted from video footage. They can be used to distinguish videos, summarize them, and provide access points into them. Keyframes usually signal transition points, scene changes, or significant image shifts in a video.
The eVe 3 professional API can index and search video as well as photographs and graphics. The eVe engine is able to "watch" video and index a keyframe just as if it were any other still image. You can then search for videos with keyframes that match a query image. You can also use this to create storyboard representations or to automatically annotate the video for later navigation.
The following code snippet shows how to extract keyframes from a video for indexing using eVe.
The procedure is fairly simple. When you run the code below, it opens the video file and counts its frames. Every Xth frame is examined, where X comes from the frameGrabberStep entry in the eVe.Properties file. If the frame is at least Y% different from the previously examined one (Y comes from the frameGrabberMinimumDistance entry, combined with a multiplier called frameGrabberAlpha that should be left at its default), it is treated as a candidate keyframe and returned with the other candidates. Your program can then accept or reject each candidate. Accepted keyframes must then be analyzed before they can become part of the database.
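For reference, the frame-grabber entries described above live in the eVe.Properties file. The snippet below is only an illustration of how such a configuration might look; the values are placeholders, not shipped defaults, so consult your own eVe.Properties file for the actual settings.

# Frame-grabber settings in eVe.Properties (placeholder values for illustration)

# Examine every Nth frame of the video (the X described above)
frameGrabberStep=10

# Minimum difference from the previously examined frame before a frame
# becomes a candidate keyframe (the Y% threshold described above)
frameGrabberMinimumDistance=15

# Internal weighting multiplier; leave this at the value it shipped with
frameGrabberAlpha=1.0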
The following is sample code to locate and extract keyframes from a video clip:
package com.evisionglobal.eve.browser;

import com.evisionglobal.eve.*;
import com.evisionglobal.eve.kernel.*;
import com.evisionglobal.eve.drivers.*;
import java.awt.*;
import java.awt.event.*;
import java.lang.reflect.*;
import java.net.*;
import java.text.*;
import java.util.*;
import javax.swing.*;

public class VideoLoader extends JFrame
{
    JViewport vp = null;               // viewport holding the log text pane
    String filename = "";              // path of the video file to process
    XTextPane textPane = null;         // log output
    JProgressBar progressBar = null;   // progress through the video's frames
    Job job = null;                    // background thread that grabs keyframes
    Vector keyframes = new Vector();   // candidate keyframes (MediaObjects)

    public VideoLoader(String filename)
    {
        this.filename = filename;
        int xmax = 600;
        int ymax = 300;

        // Build the window: a scrolling log pane with a progress bar below it.
        setTitle("Video Loader");
        setSize(new Dimension(xmax, ymax));
        JPanel contentPane = (JPanel) getContentPane();
        contentPane.setLayout(new BorderLayout());
        contentPane.setSize(new Dimension(xmax, ymax));
        JScrollPane scroller = new JScrollPane();
        scroller.setPreferredSize(new Dimension(xmax, ymax));
        contentPane.add(scroller, BorderLayout.CENTER);
        vp = scroller.getViewport();
        textPane = new XTextPane();
        textPane.setFont(new java.awt.Font("SansSerif", Font.BOLD, 16));
        vp.add(textPane);
        progressBar = new JProgressBar();
        contentPane.add(progressBar, BorderLayout.SOUTH);
        pack();
        validate();

        // Center the window on the screen, clamping it to the screen size.
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        Dimension frameSize = getSize();
        if (frameSize.height > screenSize.height)
        {
            frameSize.height = screenSize.height;
        }
        if (frameSize.width > screenSize.width)
        {
            frameSize.width = screenSize.width;
        }
        setLocation((screenSize.width - frameSize.width) / 2,
                    (screenSize.height - frameSize.height) / 2);

        setup();
        setVisible(true);

        // Start grabbing keyframes on a background thread.
        job = new Job();
        job.start();
    }

    public void setup()
    {
        //
        // step 1 - make sure we have a video panel set up
        //
        if (G.videoPanel == null)
            G.rightTabPanel.add(new VideoPanel(), "Video");

        //
        // step 2 - delete all jpgs/edfs in temp directory
        //
        XP xp = new XP();
        xp.deleteFileList(G.context.tempDirectory, "jpg");
        xp.deleteFileList(G.context.tempDirectory, "edf");
    }

    public class Job extends Thread
    {
        FrameGrabber fg = null;

        public Job()
        {
            fg = G.context.newFrameGrabber();
        }

        // Append a timestamped message to the log pane.
        public void note(String message)
        {
            textPane.append((new Date()).toString() + ": " + message + "\n");
        }

        public void note(Exception e)
        {
            XP xp = new XP();
            note(xp.parseException(e));
        }

        public void run()
        {
            int i = 0;
            int totalFrames = 0;
            DecimalFormat format = new DecimalFormat("0.000E0");
            try
            {
                note("Starting...");
                fg.open(filename);
                note("Opened: " + filename);
                totalFrames = fg.getTotalFrames();
                note("Total Frames To Process: " + totalFrames);
                progressBar.setMinimum(1);
                progressBar.setMaximum(totalFrames);
                progressBar.setStringPainted(true);

                // Step through the video, sampling every frameGrabberStep frames.
                for (i = 1; i <= totalFrames; i = i + G.context.frameGrabberStep)
                {
                    progressBar.setValue(i);

                    // gotKeyFrame() returns true when the frame differs enough
                    // from the previously examined one to be a candidate keyframe.
                    boolean keyframe = fg.gotKeyFrame(i);
                    long start = System.currentTimeMillis();
                    if (keyframe)
                    {
                        MediaObject eve = fg.getMediaObject(i);
                        keyframes.add(eve);
                        byte buffer[] = (byte[]) eve.getProperty("image");
                        note("Keyframe Found: " + i + " IM: " + buffer.length
                             + " D: " + format.format(fg.getTotalDistance()));
                    }
                    long stop = System.currentTimeMillis();
                    note("Time: " + (stop - start));
                }

                fg.close();
                progressBar.setValue(totalFrames);

                // Hand the candidate keyframes to the review panel for
                // acceptance or rejection, then clean up temporary files.
                G.videoReviewPanel.refresh(keyframes);
                G.rightTabPanel.setSelectedComponent(G.videoPanel);
                G.videoTabPanel.setSelectedComponent(G.videoReviewPanel);
                XP xp = new XP();
                xp.deleteFileList(G.context.tempDirectory, ".jpg.xxx");
            }
            catch (Exception e)
            {
                note("Error:");
                note(e);
            }
        }
    }
}
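As a quick usage sketch, the class above can be launched with nothing more than a file path. This is an illustration only: the class name VideoLoaderDemo and the file path are placeholders, and the constructor assumes the eVe browser has already initialized its global G context, since setup() and Job both rely on G.context and the browser's tab panels.

public class VideoLoaderDemo
{
    public static void main(String[] args)
    {
        // Placeholder path; substitute a video format supported by your FrameGrabber driver.
        String clip = "C:/media/sample.mpg";

        // Constructing the loader opens the progress window and starts the
        // background Job, which steps through the clip and collects candidate
        // keyframes for the review panel.
        new VideoLoader(clip);
    }
}

From the review panel you can accept or reject each candidate; accepted keyframes still need to be analyzed before they become part of the database, as noted above.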