Cam output supported. Reinitialization fixed.
olir committed Mar 16, 2018
1 parent 5f2139b commit 077bcd5
Showing 7 changed files with 142 additions and 69 deletions.
141 changes: 85 additions & 56 deletions src/main/java/de/screenflow/frankenstein/MovieProcessor.java
@@ -28,6 +28,8 @@
import de.screenflow.frankenstein.task.TaskHandler;
import de.screenflow.frankenstein.task.TimeTaskHandler;
import de.screenflow.frankenstein.vf.VideoFilter;
import de.screenflow.frankenstein.vf.VideoStreamSource;
import de.screenflow.frankenstein.vf.input.VideoInput;

public class MovieProcessor {

@@ -51,6 +53,7 @@ public class MovieProcessor {
private double movie_w;

private static boolean stopped = false;
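// Set at the start of process(): true for file-based sources, false while a live
// VideoStreamSource is being captured; stopStream() flips it to true to end capturing.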
private boolean streamStopped = false;
int currentPos = 0;

private File ffmpeg;
@@ -115,7 +118,8 @@ public void processStreamFrame(ProcessingListener l) {
if (frame != null && !frame.empty()) {
Mat newFrame = frame;
for (VideoFilter filter : filters) {
// System.out.println("MovieProcessor processStreamFrame " + filter.getClass().getName());
// System.out.println("MovieProcessor processStreamFrame " +
// filter.getClass().getName());
newFrame = filter.process(newFrame, 1);
}
if (l != null)
@@ -129,48 +133,58 @@ public void processStreamFrame(ProcessingListener l) {
return;
}
}

public boolean process(ProcessingListener l) {
try {
System.out.print("doOutput=" + configuration.doOutput);

// 1. Detach Audio and Metadata from original video and store
// temporarily
if (configuration.doOutput && configuration.doInput) {
if (!new Task(this,
ffmpeg.getAbsolutePath() + " -y -i \"" + configuration.getInputVideo() + "\""
+ " -f ffmetadata " + tempMetadataFile.getAbsolutePath()
+ " -vn -ar 44100 -ac 2 -ab 192k -f mp3 -r 21 " + tempAudioFile.getAbsolutePath(),
new TimeTaskHandler(l, "Splitting Audio")).run())
return false;
streamStopped = !configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource);
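// Detaching audio / creating silent audio below only applies to non-stream sources;
// a live camera input is recorded without an audio track.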
if (!configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource)) {
System.out.print(
"doOutput=" + configuration.doOutput + " with source=" + (configuration.getSource() != null
? configuration.getSource().getClass().getName() : "none"));

// 1. Detach Audio and Metadata from original video and store
// temporarily
if (configuration.doOutput && configuration.doInput) {
if (!new Task(this,
ffmpeg.getAbsolutePath() + " -y -i \"" + configuration.getInputVideo() + "\""
+ " -f ffmetadata " + tempMetadataFile.getAbsolutePath()
+ " -vn -ar 44100 -ac 2 -ab 192k -f mp3 -r 21 " + tempAudioFile.getAbsolutePath(),
new TimeTaskHandler(l, "Splitting Audio")).run())
return false;

configuration.metadata.clear();
configuration.metadata.load(tempMetadataFile);
System.out
.print("Meta Data:\n===================\n" + configuration.metadata + "===================\n");

} else if (configuration.doOutput) {
// Create silent mp3
if (!new Task(this,
ffmpeg.getAbsolutePath() + " -y -f lavfi -i anullsrc=r=44100:cl=mono -t "
+ (movie_frameCount / movie_fps) + " -q:a 9 -acodec libmp3lame "
+ tempAudioFile.getAbsolutePath(),
new TimeTaskHandler(l, "Creating Silent Audio Audio")).run())
return false;
configuration.metadata.clear();
configuration.metadata.load(tempMetadataFile);
System.out.print(
"Meta Data:\n===================\n" + configuration.metadata + "===================\n");

} else if (configuration.doOutput) {
// Create silent mp3
if (!new Task(this,
ffmpeg.getAbsolutePath() + " -y -f lavfi -i anullsrc=r=44100:cl=mono -t "
+ (movie_frameCount / movie_fps) + " -q:a 9 -acodec libmp3lame "
+ tempAudioFile.getAbsolutePath(),
new TimeTaskHandler(l, "Creating Silent Audio Audio")).run())
return false;
}
}

// 2. Process Video without audio ()
System.out.print("Processing video: ");
Mat newFrame = null;
if (l != null)
l.taskUpdate(null, "Processing video");

int i = 0;
while (!stopped
&& (configuration.getSource().getFrames() < 0 || i < configuration.getSource().getFrames())) {
while (!stopped && (configuration.getSource().getFrames() < 0 || i < configuration.getSource().getFrames()
|| !streamStopped)) {
i++;
currentPos = configuration.getSource().seek(i, l);
frame = configuration.getSource().getFrame();
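// File sources are driven by seeking to frame i; a live stream instead waits about
// one frame interval (pause()) and then grabs whatever the camera has delivered.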
if (streamStopped) {
currentPos = configuration.getSource().seek(i, l);
frame = configuration.getSource().getFrame();
} else {
((VideoStreamSource)configuration.getSource()).pause();
frame = configuration.getSource().getFrame();
}
if (frame != null && !frame.empty()) {
if (!filters.isEmpty()) {
newFrame = frame;
@@ -213,35 +227,51 @@ public boolean process(ProcessingListener l) {

if (configuration.doOutput) {
new File(configuration.outputVideo).delete();
if (configuration.doInput) {
if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -i " + tempVideoFile.getAbsolutePath() + " -i "
+ tempAudioFile.getAbsolutePath() + " -i " + tempMetadataFile.getAbsolutePath()
+ " -map_metadata 2" + " -c:a aac -c:v libx264 -q 17 \"" + configuration.outputVideo + '"',
new TimeTaskHandler(l, "Assembling Output")).run())
return false;
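// A live-stream recording skips the ffmpeg muxing below and the temp video is simply
// renamed to the output file; all other sources are re-muxed with audio (and metadata, if any).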
if (!configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource)) {
if (configuration.doInput) {
if (!new Task(this,
ffmpeg.getAbsolutePath() + " -y -i " + tempVideoFile.getAbsolutePath() + " -i "
+ tempAudioFile.getAbsolutePath() + " -i " + tempMetadataFile.getAbsolutePath()
+ " -map_metadata 2" + " -c:a aac -c:v libx264 -q 17 \""
+ configuration.outputVideo + '"',
new TimeTaskHandler(l, "Assembling Output")).run())
return false;
} else {
if (!new Task(this,
ffmpeg.getAbsolutePath() + " -y -i " + tempVideoFile.getAbsolutePath() + " -i "
+ tempAudioFile.getAbsolutePath() + " -c:a aac -c:v libx264 -q 17 "
+ configuration.outputVideo,
new TimeTaskHandler(l, "Processing Output")).run())
return false;
}
} else {
if (!new Task(this,
ffmpeg.getAbsolutePath() + " -y -i " + tempVideoFile.getAbsolutePath() + " -i "
+ tempAudioFile.getAbsolutePath() + " -c:a aac -c:v libx264 -q 17 "
+ configuration.outputVideo,
new TimeTaskHandler(l, "Processing Output")).run())
return false;
System.out.println("Renaming temp file "+tempVideoFile.getAbsolutePath());
tempVideoFile.renameTo(new File(configuration.outputVideo));
}
if (!new File(configuration.outputVideo).exists()) {
System.err.println("Missing output.");
File of = new File(configuration.outputVideo);
if (!of.exists()) {
System.err.println("Missing output "+of.getAbsolutePath());
return false;
} else {
System.out.println("Video created: " + of.getAbsolutePath());
}
tempVideoFile.delete();
tempAudioFile.delete();
tempMetadataFile.delete();
}
} finally {
closeInput();
// if (!configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource))
// closeInput();
closeOutput();
openOutput(null);
}
return true;
}

public void stopStream() {
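// Checked by the capture loop in process(); requests that reading from the live source stops.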
streamStopped = true;
}

public static void stop() {
stopped = true;
}
@@ -374,20 +404,19 @@ public void handleLine(String line) {
if (s >= 0) {
inDirectShowSection = true;
}
}
else {
} else {
int s = line.indexOf("] DirectShow audio devices");
if (s >= 0) {
inDirectShowSection = false;
return;
}

s = line.indexOf("] \"");
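// A device entry carries its name in quotes (e.g. a line like [dshow ...] "Some Camera");
// the text between the quotes is extracted below.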
if (s>0) {
if (s > 0) {
s++;
int e = line.indexOf("\"", s);
if (e>0) {
devices.add(line.substring(s,e));
if (e > 0) {
devices.add(line.substring(s, e));
}
}
}
@@ -410,20 +439,19 @@ public void handleLine(String line) {
if (s >= 0) {
inDirectShowSection = true;
}
}
else {
} else {
int s = line.indexOf("] DirectShow video devices");
if (s >= 0) {
inDirectShowSection = false;
return;
}

s = line.indexOf("] \"");
if (s>0) {
if (s > 0) {
s++;
int e = line.indexOf("\"", s);
if (e>0) {
devices.add(line.substring(s,e));
if (e > 0) {
devices.add(line.substring(s, e));
}
}
}
@@ -432,4 +460,5 @@
return devices;
}

}
@@ -384,6 +384,8 @@ public void rActionVideoFileInput() {
}

@FXML public void rActionCameraInput() {
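// Camera recordings are written to a fixed default output file, shown in the output field.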
configuration.outputVideo = "stream.mp4";
tfPropertyOutputFile.setText(configuration.outputVideo);
removeTab(tabVideoFileInput);
removeTab(tabSlideshow);
removeTab(tabTestVideoGenerator);
@@ -34,6 +34,7 @@
import de.screenflow.frankenstein.MovieProcessor;
import de.screenflow.frankenstein.ProcessingListener;
import de.screenflow.frankenstein.vf.VideoFilter;
import de.screenflow.frankenstein.vf.VideoStreamSource;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
@@ -486,6 +487,14 @@ public void run() {
new Thread(r).start();
}

void startProcessing(Configuration configuration) {
runProcessing(configuration);
}

void stopProcessing() {
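// Signals the running MovieProcessor to stop reading from the live source.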
processor.stopStream();
}

@Override
public void videoStarted(int frames, double fps) {
this.fps = fps;
@@ -648,14 +657,31 @@ public void processingDone() {
});
}

boolean streamRunning = false;

@FXML
public void startButtonPressed() {
Configuration configuration = main.getConfiguration();
Platform.runLater(() -> {
startButton.setDisable(true);
configureButton.setDisable(true);
});
runProcessing(configuration);
if (streamRunning) {
streamRunning = false;
stopProcessing();
Platform.runLater(() -> {
startButton.setText("start");
});
} else {
Configuration configuration = main.getConfiguration();
Platform.runLater(() -> {
if (configuration.getSource() instanceof VideoStreamSource) {
startButton.setText("stop");
streamRunning = true;
} else
startButton.setDisable(true);
configureButton.setDisable(true);
});
if (streamRunning)
startProcessing(configuration);
else
runProcessing(configuration);
}
}

@FXML
@@ -739,7 +765,10 @@ public void prematureEnd(int realFrameCount) {
seekingErrorHandling = true;
seekPos = -1;
System.err.println("Warning: Premature end of source at frame " + realFrameCount);
new Error().printStackTrace();
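// A source ending already at frame 1 usually means the camera delivered nothing
// (e.g. it is opened by another application), so give up instead of retrying.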
if (realFrameCount==1) {
System.err.println("Fatal: If video source is a cam, close other instances first.");
System.exit(-1);
}
Platform.runLater(() -> {
this.slider.setValue(realFrameCount);
adjustVideoLengthDisplay();
@@ -0,0 +1,5 @@
package de.screenflow.frankenstein.vf;

public interface VideoStreamSource extends VideoSource {
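// Marks a live source such as a camera; pause() waits briefly between frame grabs
// (CameraInput sleeps roughly one frame interval).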
void pause();
}
@@ -43,15 +43,15 @@ public Mat configure(Mat sourceFrame) {

@Override
public Mat process(Mat sourceFrame, int frameId) {
System.out.println("???");
// System.out.println("???");

Rect roi = new Rect(0, 0, sourceFrame.cols(), sourceFrame.rows());
sourceFrame.copyTo(new Mat(newFrame, roi));
System.out.println("???"+newFrame.cols()+" "+sourceFrame.cols());
// System.out.println("???"+newFrame.cols()+" "+sourceFrame.cols());

roi = new Rect(sourceFrame.cols(), 0, sourceFrame.cols(), sourceFrame.rows());
sourceFrame.copyTo(new Mat(newFrame, roi));
System.out.println("???");
// System.out.println("???");

return newFrame;
}
@@ -24,8 +24,9 @@

import de.screenflow.frankenstein.ProcessingListener;
import de.screenflow.frankenstein.vf.VideoSource;
import de.screenflow.frankenstein.vf.VideoStreamSource;

public class CameraInput implements VideoSource {
public class CameraInput implements VideoStreamSource {
private final int id;

private VideoCapture movie = null;
@@ -72,6 +73,13 @@ public void run() {
}, 0, (int)(1000.0/fps));
}

public void pause() {
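// Wait approximately one frame interval so the caller does not poll faster than the
// camera delivers frames; an interrupt simply ends the wait early.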
try {
Thread.sleep((int)(1000.0/fps));
} catch (InterruptedException e) {
}
}

@Override
public void close() {
if (movie != null)
@@ -41,7 +41,7 @@
<Font name="System Bold" size="16.0" />
</font>
</Label>
<RadioButton fx:id="rVideoFileInput" onAction="#rActionVideoFileInput" text="Video File Input">
<RadioButton fx:id="rVideoFileInput" onAction="#rActionVideoFileInput" text="Video File">
<toggleGroup>
<ToggleGroup fx:id="InputGroup" />
</toggleGroup>
@@ -58,7 +58,7 @@
<Font size="16.0" />
</font>
</RadioButton>
<RadioButton fx:id="rCameraInput" onAction="#rActionCameraInput" text="Camera Input" toggleGroup="$InputGroup">
<RadioButton fx:id="rCameraInput" onAction="#rActionCameraInput" text="Camera (no audio)" toggleGroup="$InputGroup">
<font>
<Font size="16.0" />
</font>