Skip to content

Commit

Permalink
Merge pull request #93 from McMaster-Baja-Racing/76-multiple-graphs-placement-of-objects
Browse files Browse the repository at this point in the history

76 multiple graphs placement of objects
  • Loading branch information
gr812b committed Mar 28, 2024
2 parents 4c0e43c + ee4790b commit d6aad9e
Show file tree
Hide file tree
Showing 67 changed files with 1,536 additions and 548 deletions.
1 change: 0 additions & 1 deletion .gitignore
@@ -1,4 +1,3 @@

upload-dir/

.vscode/
12 changes: 10 additions & 2 deletions API/pom.xml
Expand Up @@ -28,7 +28,11 @@
<artifactId>opencsv</artifactId>
<version>5.8</version>
</dependency>

<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.8.0</version> <!-- pinned; bump deliberately and in sync with other commons-io usages -->
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
Expand All @@ -37,7 +41,6 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
Expand All @@ -48,6 +51,11 @@
<artifactId>jSerialComm</artifactId>
<version>[2.0.0,3.0.0)</version>
</dependency>
<dependency>
<groupId>com.drewnoakes</groupId>
<artifactId>metadata-extractor</artifactId>
<version>2.19.0</version>
</dependency>
</dependencies>

<build>
Expand Down
153 changes: 134 additions & 19 deletions API/src/main/java/backend/API/FileUploadController.java
Expand Up @@ -18,8 +18,11 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
Expand Down Expand Up @@ -48,7 +51,7 @@
import backend.API.analyzer.Analyzer;

import backend.API.model.fileInformation;
import backend.API.model.fileList;
import backend.API.model.fileTimespan;

@Controller
public class FileUploadController {
Expand Down Expand Up @@ -98,25 +101,52 @@ public ResponseEntity<Resource> serveFile(HttpServletRequest request) {

// This is the method that returns information about all the files, to be used
// by fetch
// It returns an object of type fileList from the model folder
// It returns an object of type fileInformation from the model folder
@GetMapping("/files")
@ResponseBody
public ResponseEntity<fileList> listUploadedFiles() throws IOException {
public ResponseEntity<ArrayList<fileInformation>> listUploadedFiles() throws IOException {

// Set these headers so that you can access from LocalHost
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
responseHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");

fileList files = new fileList();
ArrayList<fileInformation> files = new ArrayList<fileInformation>();

// Get name, headers and size of each file
storageService.loadAll().forEach(path -> {
try {
// Get the path and filename of each file and print it
long size = storageService.loadAsResource(path.toString()).contentLength();
String[] headers = storageService.readHeaders(path.toString()).split(",");
files.addFile(new fileInformation(path.toString().replace("\\", "/"), headers, size));
files.add(new fileInformation(path.toString().replace("\\", "/"), headers, size));
} catch (IOException e) {
e.printStackTrace();
}
});

return ResponseEntity.ok().headers(responseHeaders).body(files);
}

// Returns the file information for all the files in a folder
@GetMapping("/files/folder/{foldername:.+}")
@ResponseBody
public ResponseEntity<ArrayList<fileInformation>> listFolderFiles(@PathVariable String foldername) throws IOException {

// Set these headers so that you can access from LocalHost
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
responseHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");

ArrayList<fileInformation> files = new ArrayList<fileInformation>();

// Get name, headers and size of each file
storageService.loadFolder(foldername).forEach(path -> {
try {
// Get the path and filename of each file and print it
long size = storageService.loadAsResource(path.toString()).contentLength();
String[] headers = storageService.readHeaders(path.toString()).split(",");
files.add(new fileInformation(path.toString().replace("\\", "/"), headers, size));
} catch (IOException e) {
e.printStackTrace();
}
Expand All @@ -143,6 +173,50 @@ public ResponseEntity<String> listFileInformation(@PathVariable String filename)
return ResponseEntity.ok().headers(responseHeaders).body(fileinfo);
}

// Returns the start/end timespan of every file in a type folder ("csv" or "mp4").
@GetMapping("/timespan/folder/{foldername:.+}")
@ResponseBody
public ResponseEntity<ArrayList<fileTimespan>> listFolderTimespans(@PathVariable String foldername) throws IOException {

// CORS headers so the frontend on localhost can call this endpoint.
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
responseHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");

ArrayList<fileTimespan> timespans = new ArrayList<>();

// try-with-resources: a directory-listing Stream holds an open OS handle
// and must be closed, or the handle leaks on every request.
try (Stream<Path> paths = storageService.loadFolder(foldername)) {
switch (foldername) {
case "csv":
// container[0] caches the last-seen parent folder, container[1] its zero
// time, so the zero-time lookup runs once per folder instead of per file.
Object[] container = {null, null};
paths.forEach(path -> {
if (path.getParent() != null) {
// Compare with equals(): equal folders are not guaranteed to be the
// same Path instance, so != would silently defeat the cache.
if (!path.getParent().equals(container[0])) {
container[0] = path.getParent();
container[1] = storageService.getZeroTime((Path) container[0]);
}
LocalDateTime[] timespan = storageService.getTimespan(path.toString(), (LocalDateTime) container[1]);
timespans.add(new fileTimespan(path.toString().replace("\\", "/"), timespan[0], timespan[1]));
}
});
break;
case "mp4":
// Video files carry their own timestamps; no zero-time offset needed.
paths.forEach(path -> {
LocalDateTime[] timespan = storageService.getTimespan(path.toString());
timespans.add(new fileTimespan(path.toString().replace("\\", "/"), timespan[0], timespan[1]));
});
break;
default: throw new IllegalArgumentException("Invalid folder name: " + foldername);
}
}

return ResponseEntity.ok().headers(responseHeaders).body(timespans);
}

// This is the be all end all method that should take in any number of file
// names and analyzers, plus live option and return a file
@GetMapping("/analyze")
Expand All @@ -164,34 +238,70 @@ public ResponseEntity<Resource> handleFileRequest(
// Set output files to empty string
outputFiles = new String[10];
}

// Then check if live is true, and set the options + files accordingly
if (liveOptions[0].equals("true")) {
// Maybe do the serial stuff here, but definitely look in live folder for data

// For all of the input and output files, add the root location to the front
for (int i = 0; i < inputFiles.length; i++) {
inputFiles[i] = storageService.getRootLocation().toString() + "/" + storageService.getTypeFolder(inputFiles[i]) + "/" + inputFiles[i];
}
for (int i = 0; i < outputFiles.length; i++) {
outputFiles[i] = storageService.getRootLocation().toString() + "/" + storageService.getTypeFolder(outputFiles[i]) + "/" + outputFiles[i];
}

// Then run the selected analyzer
if (analyzer != null && analyzer.length != 0 && analyzer[0] != null) {
try {
Analyzer.createAnalyzer(analyzer[0], inputFiles, inputColumns, outputFiles,
Arrays.copyOfRange(analyzer, 1, analyzer.length)).analyze();
Analyzer.createAnalyzer(analyzer[0], inputFiles, inputColumns, outputFiles,
(Object[]) Arrays.copyOfRange(analyzer, 1, analyzer.length)).analyze();
} catch (Exception e) {
System.out.println(e);
}
} else {
// If no analyzer is selected, only one file is selected, copy it
// storageService.copyFile(inputFiles[0], outputFiles[outputFiles.length - 1]);
outputFiles[outputFiles.length - 1] = "./upload-dir/" + inputFiles[0];
outputFiles[outputFiles.length - 1] = inputFiles[0];
}

// TODO: THIS SHOULD HAPPEN BEFORE RUNNING THE ANALYZER IN THE COMMON CASE
// Then check if live is true, and set the options + files accordingly
String fileOutputString = outputFiles[outputFiles.length - 1].substring(13, outputFiles[outputFiles.length - 1].length());

// print live options
System.out.println("Live options: " + liveOptions[0]);

if (liveOptions[0].equals("true")) {
outputFiles = new String[10];
// When live is true, we only want a certain amount of time from its timestamp
// Get the last timestamp, then subtract a certain amount of time, and use split analyzer between the two
int lastPoint = Integer.valueOf(storageService.getLast(fileOutputString));
int firstPoint = Math.max(0, lastPoint - 3000);

// print the two values
System.out.println("First point: " + firstPoint);
System.out.println("Last point: " + lastPoint);

Object[] extraValues = new Object[]{String.valueOf(firstPoint), String.valueOf(lastPoint)};
String[] lastFile = new String[]{fileOutputString};

try {
Analyzer.createAnalyzer("split", lastFile, inputColumns,
outputFiles, extraValues).analyze();
} catch (Exception e) {
System.out.println(e);
}

}

// Then return the final file, removing the prefix for upload dir
Resource file = storageService.loadAsResource(
outputFiles[outputFiles.length - 1].substring(13, outputFiles[outputFiles.length - 1].length()));
String filePath = outputFiles[outputFiles.length - 1];
Path path = Paths.get(filePath);
Path newPath = path.subpath(2, path.getNameCount());

Resource file = storageService.loadAsResource(newPath.toString());

// Set these headers so that you can access from LocalHost and download the file
HttpHeaders responseHeaders = new HttpHeaders();
Path absoluteFilePath = storageService
.load(outputFiles[outputFiles.length - 1].substring(13, outputFiles[outputFiles.length - 1].length()));
String relativePath = Paths.get("upload-dir").relativize(absoluteFilePath).toString();
Path absoluteFilePath = storageService.load(newPath.toString());
String relativePath = storageService.getRootLocation().relativize(absoluteFilePath).toString();
responseHeaders.add(HttpHeaders.CONTENT_DISPOSITION,
"attachment; filename=\"" + relativePath + "\"");
responseHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
Expand Down Expand Up @@ -270,8 +380,9 @@ public String handleFileUpload(@RequestParam("file") MultipartFile file,
}
if (filename.substring(filename.lastIndexOf(".") + 1).equals("bin")) {
storageService.store(file);
BinaryTOCSV.toCSV(storageService.load(filename).toAbsolutePath().toString(),
storageService.load("").toAbsolutePath().toString() + "\\", false);
String csvFilename = storageService.load(filename).toAbsolutePath().toString();
String csvOutputDir = storageService.load("").toAbsolutePath().toString() + "\\";
BinaryTOCSV.toCSV(csvFilename, csvOutputDir, false);
storageService.delete(filename);
} else {
storageService.store(file);
Expand All @@ -297,6 +408,10 @@ public ResponseEntity<String> handleFileUploadAPI(@RequestParam("file") Multipar
BinaryTOCSV.toCSV(storageService.load(filename).toAbsolutePath().toString(),
storageService.load("").toAbsolutePath().toString() + "\\", true);
storageService.delete(filename);
} else if (filename.substring(filename.lastIndexOf(".") + 1).toLowerCase().equals("mov")) {
storageService.store(file);
storageService.copyFile(filename, filename.substring(0, filename.lastIndexOf(".")) + ".mp4");
storageService.delete(filename);
} else {
storageService.store(file);
}
Expand Down
Expand Up @@ -5,8 +5,6 @@
import java.io.File;
import java.io.FileWriter;

import java.util.Date;

import backend.API.readwrite.Reader;
import backend.API.readwrite.CSVReader;

Expand Down
6 changes: 0 additions & 6 deletions API/src/main/java/backend/API/analyzer/Analyzer.java
Expand Up @@ -64,12 +64,6 @@ public ICSVWriter getWriter(String filePath) throws IOException {
// When a new analyzer is created, add it to this factory method
public static Analyzer createAnalyzer(String type, String[] inputFiles, String[] inputColumns, String[] outputFiles, Object... params) {
// Before every input and output file location, add the storage directory before it
for (int i = 0; i < inputFiles.length; i++) {
inputFiles[i] = "./upload-dir/" + inputFiles[i];
}
for (int i = 0; i < outputFiles.length; i++) {
outputFiles[i] = "./upload-dir/" + outputFiles[i];
}
switch (type) {
case "accelCurve":
if (outputFiles.length == 10) {
Expand Down
Expand Up @@ -23,7 +23,7 @@ public RDPCompressionAnalyzer(String[] inputFiles, String[] outputFiles, double
@Override
public void analyze() {

System.out.println("Compressing " + inputFiles[0]);
System.out.println("Compressing " + inputFiles[0] + " with epsilon " + epsilon + " to " + outputFiles[0]);

Reader r = new CSVReader(inputFiles[0]);
Writer w = new CSVWriter(outputFiles[0]);
Expand Down
7 changes: 5 additions & 2 deletions API/src/main/java/backend/API/binary_csv/Packet.java
Expand Up @@ -48,12 +48,15 @@ public Packet(byte[] data){

//if the packettype is 37, the data is a float
if (this.packetType == 37 || this.packetType == 36){

this.isFloat = true;
this.floatData = buffer.getFloat();
} else if (this.packetType >= 28 && this.packetType <= 33){
//strain from WFT
this.isFloat = true;
this.floatData = buffer.getFloat();
}
else {
this.intData = ((data[4] << 24) | (data[5] << 16) | (data[6] << 8) | data[7]);
this.intData = buffer.getInt();
}

}
Expand Down

0 comments on commit d6aad9e

Please sign in to comment.