Add hooks for image processing to examples (#27)

Fixes #23.
Peter Johnson 2019-01-02 23:12:32 -08:00 committed by GitHub
parent aaf6f88c9c
commit c702b9421a
5 changed files with 476 additions and 9 deletions

.gitignore

@@ -16,5 +16,3 @@ deps/tools/setuidgids
*.sw?
*.debug
stage2/01-sys-tweaks/extfiles
deps/examples/cpp-multiCameraServer/main.cpp
deps/examples/java-multiCameraServer/src

deps/04-copy.sh

@@ -7,9 +7,6 @@ mkdir -p ${DEST}
#
# examples
#
mkdir -p examples/java-multiCameraServer/src/main/java
cp allwpilib/cameraserver/multiCameraServer/src/main/java/Main.java examples/java-multiCameraServer/src/main/java/
cp allwpilib/cameraserver/multiCameraServer/src/main/native/cpp/main.cpp examples/cpp-multiCameraServer/
sh -c 'cd examples && zip -r - java-multiCameraServer' > ${DEST}/java-multiCameraServer.zip
sh -c 'cd examples && zip -r - cpp-multiCameraServer' > ${DEST}/cpp-multiCameraServer.zip

deps/examples/cpp-multiCameraServer/main.cpp

@@ -0,0 +1,224 @@
/*----------------------------------------------------------------------------*/
/* Copyright (c) 2018 FIRST. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
#include <cstdio>
#include <string>
#include <thread>
#include <vector>
#include <networktables/NetworkTableInstance.h>
#include <vision/VisionPipeline.h>
#include <vision/VisionRunner.h>
#include <wpi/StringRef.h>
#include <wpi/json.h>
#include <wpi/raw_istream.h>
#include <wpi/raw_ostream.h>
#include "cameraserver/CameraServer.h"
/*
JSON format:
{
"team": <team number>,
"ntmode": <"client" or "server", "client" if unspecified>
"cameras": [
{
"name": <camera name>
"path": <path, e.g. "/dev/video0">
"pixel format": <"MJPEG", "YUYV", etc> // optional
"width": <video mode width> // optional
"height": <video mode height> // optional
"fps": <video mode fps> // optional
"brightness": <percentage brightness> // optional
"white balance": <"auto", "hold", value> // optional
"exposure": <"auto", "hold", value> // optional
"properties": [ // optional
{
"name": <property name>
"value": <property value>
}
]
}
]
}
*/
#ifdef __RASPBIAN__
static const char* configFile = "/boot/frc.json";
#else
static const char* configFile = "frc.json";
#endif
namespace {
unsigned int team;
bool server = false;
struct CameraConfig {
std::string name;
std::string path;
wpi::json config;
};
std::vector<CameraConfig> cameraConfigs;
wpi::raw_ostream& ParseError() {
return wpi::errs() << "config error in '" << configFile << "': ";
}
bool ReadCameraConfig(const wpi::json& config) {
CameraConfig c;
// name
try {
c.name = config.at("name").get<std::string>();
} catch (const wpi::json::exception& e) {
ParseError() << "could not read camera name: " << e.what() << '\n';
return false;
}
// path
try {
c.path = config.at("path").get<std::string>();
} catch (const wpi::json::exception& e) {
ParseError() << "camera '" << c.name
<< "': could not read path: " << e.what() << '\n';
return false;
}
c.config = config;
cameraConfigs.emplace_back(std::move(c));
return true;
}
bool ReadConfig() {
// open config file
std::error_code ec;
wpi::raw_fd_istream is(configFile, ec);
if (ec) {
wpi::errs() << "could not open '" << configFile << "': " << ec.message()
<< '\n';
return false;
}
// parse file
wpi::json j;
try {
j = wpi::json::parse(is);
} catch (const wpi::json::parse_error& e) {
ParseError() << "byte " << e.byte << ": " << e.what() << '\n';
return false;
}
// top level must be an object
if (!j.is_object()) {
ParseError() << "must be JSON object\n";
return false;
}
// team number
try {
team = j.at("team").get<unsigned int>();
} catch (const wpi::json::exception& e) {
ParseError() << "could not read team number: " << e.what() << '\n';
return false;
}
// ntmode (optional)
if (j.count("ntmode") != 0) {
try {
auto str = j.at("ntmode").get<std::string>();
wpi::StringRef s(str);
if (s.equals_lower("client")) {
server = false;
} else if (s.equals_lower("server")) {
server = true;
} else {
ParseError() << "could not understand ntmode value '" << str << "'\n";
}
} catch (const wpi::json::exception& e) {
ParseError() << "could not read ntmode: " << e.what() << '\n';
}
}
// cameras
try {
for (auto&& camera : j.at("cameras")) {
if (!ReadCameraConfig(camera)) return false;
}
} catch (const wpi::json::exception& e) {
ParseError() << "could not read cameras: " << e.what() << '\n';
return false;
}
return true;
}
cs::UsbCamera StartCamera(const CameraConfig& config) {
wpi::outs() << "Starting camera '" << config.name << "' on " << config.path
<< '\n';
auto camera = frc::CameraServer::GetInstance()->StartAutomaticCapture(
config.name, config.path);
camera.SetConfigJson(config.config);
return camera;
}
// example pipeline
class MyPipeline : public frc::VisionPipeline {
public:
int val = 0;
void Process(cv::Mat& mat) override {
++val;
}
};
} // namespace
int main(int argc, char* argv[]) {
if (argc >= 2) configFile = argv[1];
// read configuration
if (!ReadConfig()) return EXIT_FAILURE;
// start NetworkTables
auto ntinst = nt::NetworkTableInstance::GetDefault();
if (server) {
wpi::outs() << "Setting up NetworkTables server\n";
ntinst.StartServer();
} else {
wpi::outs() << "Setting up NetworkTables client for team " << team << '\n';
ntinst.StartClientTeam(team);
}
// start cameras
std::vector<cs::VideoSource> cameras;
for (auto&& cameraConfig : cameraConfigs)
cameras.emplace_back(StartCamera(cameraConfig));
// start image processing on camera 0 if present
if (cameras.size() >= 1) {
std::thread([&] {
frc::VisionRunner<MyPipeline> runner(cameras[0], new MyPipeline(),
[&](MyPipeline &pipeline) {
// do something with pipeline results
});
/* something like this for GRIP:
frc::VisionRunner<MyPipeline> runner(cameras[0], new grip::GripPipeline(),
[&](grip::GripPipeline& pipeline) {
...
});
*/
runner.RunForever();
}).detach();
}
// loop forever
for (;;) std::this_thread::sleep_for(std::chrono::seconds(10));
}
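
For reference, a config file that follows the schema documented in the comment above (read from /boot/frc.json on the Pi, and repeated in the Java example below) might look like the following. This is a hypothetical example for illustration only; the team number, camera name, device path, and property values are made up and are not part of this commit.

{
  "team": 1234,
  "ntmode": "client",
  "cameras": [
    {
      "name": "rPi Camera 0",
      "path": "/dev/video0",
      "pixel format": "MJPEG",
      "width": 160,
      "height": 120,
      "fps": 15,
      "brightness": 50,
      "white balance": "auto",
      "exposure": "auto",
      "properties": [
        {
          "name": "contrast",
          "value": 50
        }
      ]
    }
  ]
}
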

deps/examples/java-multiCameraServer/src/main/java/Main.java

@@ -0,0 +1,245 @@
/*----------------------------------------------------------------------------*/
/* Copyright (c) 2018 FIRST. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import edu.wpi.cscore.VideoSource;
import edu.wpi.first.cameraserver.CameraServer;
import edu.wpi.first.networktables.NetworkTableInstance;
import edu.wpi.first.vision.VisionPipeline;
import edu.wpi.first.vision.VisionThread;
import org.opencv.core.Mat;
/*
JSON format:
{
"team": <team number>,
"ntmode": <"client" or "server", "client" if unspecified>
"cameras": [
{
"name": <camera name>
"path": <path, e.g. "/dev/video0">
"pixel format": <"MJPEG", "YUYV", etc> // optional
"width": <video mode width> // optional
"height": <video mode height> // optional
"fps": <video mode fps> // optional
"brightness": <percentage brightness> // optional
"white balance": <"auto", "hold", value> // optional
"exposure": <"auto", "hold", value> // optional
"properties": [ // optional
{
"name": <property name>
"value": <property value>
}
]
}
]
}
*/
public final class Main {
private static String configFile = "/boot/frc.json";
@SuppressWarnings("MemberName")
public static class CameraConfig {
public String name;
public String path;
public JsonObject config;
}
public static int team;
public static boolean server;
public static List<CameraConfig> cameraConfigs = new ArrayList<>();
private Main() {
}
/**
* Report parse error.
*/
public static void parseError(String str) {
System.err.println("config error in '" + configFile + "': " + str);
}
/**
* Read single camera configuration.
*/
public static boolean readCameraConfig(JsonObject config) {
CameraConfig cam = new CameraConfig();
// name
JsonElement nameElement = config.get("name");
if (nameElement == null) {
parseError("could not read camera name");
return false;
}
cam.name = nameElement.getAsString();
// path
JsonElement pathElement = config.get("path");
if (pathElement == null) {
parseError("camera '" + cam.name + "': could not read path");
return false;
}
cam.path = pathElement.getAsString();
cam.config = config;
cameraConfigs.add(cam);
return true;
}
/**
* Read configuration file.
*/
@SuppressWarnings("PMD.CyclomaticComplexity")
public static boolean readConfig() {
// parse file
JsonElement top;
try {
top = new JsonParser().parse(Files.newBufferedReader(Paths.get(configFile)));
} catch (IOException ex) {
System.err.println("could not open '" + configFile + "': " + ex);
return false;
}
// top level must be an object
if (!top.isJsonObject()) {
parseError("must be JSON object");
return false;
}
JsonObject obj = top.getAsJsonObject();
// team number
JsonElement teamElement = obj.get("team");
if (teamElement == null) {
parseError("could not read team number");
return false;
}
team = teamElement.getAsInt();
// ntmode (optional)
if (obj.has("ntmode")) {
String str = obj.get("ntmode").getAsString();
if ("client".equalsIgnoreCase(str)) {
server = false;
} else if ("server".equalsIgnoreCase(str)) {
server = true;
} else {
parseError("could not understand ntmode value '" + str + "'");
}
}
// cameras
JsonElement camerasElement = obj.get("cameras");
if (camerasElement == null) {
parseError("could not read cameras");
return false;
}
JsonArray cameras = camerasElement.getAsJsonArray();
for (JsonElement camera : cameras) {
if (!readCameraConfig(camera.getAsJsonObject())) {
return false;
}
}
return true;
}
/**
* Start running the camera.
*/
public static VideoSource startCamera(CameraConfig config) {
System.out.println("Starting camera '" + config.name + "' on " + config.path);
VideoSource camera = CameraServer.getInstance().startAutomaticCapture(
config.name, config.path);
Gson gson = new GsonBuilder().create();
camera.setConfigJson(gson.toJson(config.config));
return camera;
}
/**
* Example pipeline.
*/
public static class MyPipeline implements VisionPipeline {
public int val;
@Override
public void process(Mat mat) {
val += 1;
}
}
/**
* Main.
*/
public static void main(String... args) {
if (args.length > 0) {
configFile = args[0];
}
// read configuration
if (!readConfig()) {
return;
}
// start NetworkTables
NetworkTableInstance ntinst = NetworkTableInstance.getDefault();
if (server) {
System.out.println("Setting up NetworkTables server");
ntinst.startServer();
} else {
System.out.println("Setting up NetworkTables client for team " + team);
ntinst.startClientTeam(team);
}
// start cameras
List<VideoSource> cameras = new ArrayList<>();
for (CameraConfig cameraConfig : cameraConfigs) {
cameras.add(startCamera(cameraConfig));
}
// start image processing on camera 0 if present
if (cameras.size() >= 1) {
VisionThread visionThread = new VisionThread(cameras.get(0),
new MyPipeline(), pipeline -> {
// do something with pipeline results
});
/* something like this for GRIP:
VisionThread visionThread = new VisionThread(cameras.get(0),
new GripPipeline(), pipeline -> {
...
});
*/
visionThread.start();
}
// loop forever
for (;;) {
try {
Thread.sleep(10000);
} catch (InterruptedException ex) {
return;
}
}
}
}
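
The MyPipeline hook above only counts frames. As a sketch of what "do something with pipeline results" could look like, the pipeline below thresholds each frame and publishes a pixel count to NetworkTables. It is not part of this commit; the "vision" table, the "greenPixels" entry, and the HSV range are made-up examples.

import edu.wpi.first.networktables.NetworkTableEntry;
import edu.wpi.first.networktables.NetworkTableInstance;
import edu.wpi.first.vision.VisionPipeline;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class GreenCountPipeline implements VisionPipeline {
  private final Mat hsv = new Mat();
  private final Mat mask = new Mat();

  // "vision" and "greenPixels" are made-up names; use whatever the robot code expects.
  private final NetworkTableEntry greenPixels =
      NetworkTableInstance.getDefault().getTable("vision").getEntry("greenPixels");

  @Override
  public void process(Mat mat) {
    // cscore delivers frames as BGR Mats; convert to HSV and count pixels
    // inside a rough "green" range.
    Imgproc.cvtColor(mat, hsv, Imgproc.COLOR_BGR2HSV);
    Core.inRange(hsv, new Scalar(40, 50, 50), new Scalar(80, 255, 255), mask);
    greenPixels.setDouble(Core.countNonZero(mask));
  }
}

It plugs into main() the same way MyPipeline does, e.g. new VisionThread(cameras.get(0), new GreenCountPipeline(), pipeline -> {}).
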


@@ -44,7 +44,7 @@ class CameraConfig: pass
team = None
server = False
cameras = []
cameraConfigs = []
"""Report parse error."""
def parseError(str):
@@ -70,7 +70,7 @@ def readCameraConfig(config):
cam.config = config
cameras.append(cam)
cameraConfigs.append(cam)
return True
"""Read configuration file."""
@@ -128,6 +128,8 @@ def startCamera(config):
camera.setConfigJson(json.dumps(config.config))
return camera
if __name__ == "__main__":
if len(sys.argv) >= 2:
configFile = sys.argv[1]
@@ -146,8 +148,9 @@ if __name__ == "__main__":
ntinst.startClientTeam(team)
# start cameras
for camera in cameras:
startCamera(camera)
cameras = []
for cameraConfig in cameraConfigs:
cameras.append(startCamera(cameraConfig))
# loop forever
while True: