微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

edu.wpi.first.wpilibj.CameraServer的实例源码

项目:2017    文件VisionProcessor.java   
/**
 * Creates the object and starts the camera server
 * 
 * @param usbPort
 *            The USB camera port number. '0' for default.
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (int usbPort,CameraModel camera)
{
    // Adds the camera to the cscore CameraServer,in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .startAutomaticCapture("Vision Camera",usbPort);

    // Based on the selected camera type,set the field of views and focal
    // length.
    this.cameraModel = camera;
    // Only the default branch exists today; per-model FOV data (e.g. the
    // LIFECAM) was never filled in. Note a null cameraModel would NPE here.
    switch (this.cameraModel)
        {
        // case LIFECAM: //Not enough information to properly find this data.
        // see above.
        // this.horizontalFieldOfView =
        // this.verticalFieldOfView =
        // this.focalLength =
        // break;
        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
        }

}
项目:FRC6706_JAVA2017    文件VisionSubsystem2.java   
// Launches the camera/vision pipeline on a daemon thread so frame grabs
// never block robot control; the thread draws a fixed rectangle on each
// frame and republishes it to the dashboard.
public void initDefaultCommand() {
    visionThread = new Thread(() -> {
    // Get the UsbCamera from CameraServer
    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
    // Set the resolution
    camera.setResolution(640,480);

    // Get a CvSink. This will capture Mats from the camera
    CvSink cvSink = CameraServer.getInstance().getVideo();
    // Setup a CvSource. This will send images back to the Dashboard
    CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle",640,480);

    // Mats are very memory expensive. Lets reuse this Mat.
    Mat mat = new Mat();

    // This cannot be 'true'. The program will never exit if it is. This
    // lets the robot stop this thread when restarting robot code or
    // deploying.
    while (!Thread.interrupted()) {
        // Tell the CvSink to grab a frame from the camera and put it
        // in the source mat.  If there is an error notify the output.
        if (cvSink.grabFrame(mat) == 0) {
            // Send the output the error.
            outputStream.notifyError(cvSink.getError());
            // skip the rest of the current iteration
            continue;
        }
        // Put a rectangle on the image
        Imgproc.rectangle(mat,new Point(100,100),new Point(400,400),new Scalar(255,255,255),5);
        // Give the output stream a new image to display
        outputStream.putFrame(mat);
    }
});
visionThread.setDaemon(true);
visionThread.start();
  }
项目:2017-emmet    文件Robot.java   
// Robot-wide initialization: hardware map, subsystems, operator interface,
// camera server handle, autonomous command, and sensor reset.
// NOTE(review): spelled "robotinit" in this scrape; WPILib's IterativeRobot
// callback is robotInit -- confirm against the original project.
public void robotinit() {

    RobotMap.init();

    // Subsystems are constructed before the OI so their commands resolve.
    drivetrain = new Drivetrain();
    climber = new climber();
    // fuel = new Fuel();
    gear = new Gear();

    oi = new OI();

    // initializes camera server
    server = CameraServer.getInstance();
    // cameraInit();
    // server.startAutomaticCapture(0);

    // autonomousCommand = (Command) new AutoMiddleHook();
    autonomousCommand = (Command) new AutoBaseline();

    // resets all sensors
    resetAllSensors();

    if (RobotConstants.isTestingEnvironment) updateTestingEnvironment();
    updateSensordisplay();
}
项目:R2017    文件CameraSet.java   
// Spawns a background thread that continuously copies frames from the
// currently selected camera into the shared output stream, switching
// between cam1 and cam2 on a rising edge of the toggle button.
public void enable() {
    new Thread(() -> {
        cvSink = CameraServer.getInstance().getVideo(cam2);
        while (!Thread.interrupted()) {
            SmartDashboard.putString("Camera",cvSink.getSource().getName());

            cvSink.grabFrame(source);
            outputStream.putFrame(source);

            // NOTE(review): both the initial sink and the !isToggled branch
            // select cam2, so the first press re-selects the camera already
            // shown -- verify against the original project.
            if (controls.getToggleCamera() && !buttonHeld && !isToggled) {
                isToggled = true;
                cvSink = CameraServer.getInstance().getVideo(cam2);
                System.out.println("toggled");
            } else if (controls.getToggleCamera() && !buttonHeld && isToggled) {
                isToggled = false;
                cvSink = CameraServer.getInstance().getVideo(cam1);
                System.out.println("toggled");
            }
            // Remember the button state so holding it only toggles once.
            buttonHeld = controls.getToggleCamera();
        }
    }).start();
}
项目:2017-Steamworks    文件Vision.java   
/**
 * Initializes the cameras and starts the CameraServer to stream to grip /
 * SmartDashboard.
 *
 * Earlier revisions configured gearCam / shooterCam explicitly (FPS,
 * resolution, manual zero exposure for bright vision targets) and
 * registered both with the CameraServer before starting capture on
 * gearCam; that setup now lives in version control. The current behavior
 * is simply an automatic capture of USB device 0.
 */
public void init() {
    CameraServer.getInstance().startAutomaticCapture(0);
}
项目:2016    文件VisionProcessor.java   
/**
 * Creates the object and starts the camera server
 * 
 * @param usbPort
 *            The USB camera port number. '0' for default.
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (int usbPort, CameraModel camera)
{
    // NOTE(review): the original snippet was garbled by the page scrape
    // (the parameter list and the startAutomaticCapture call were fused
    // into a comment); reconstructed from the identical constructor that
    // appears earlier on this page.
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .startAutomaticCapture("Vision Camera", usbPort);

    // Based on the selected camera type, set the field of views and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel)
        {
        // case LIFECAM: //Not enough information to properly find this data.
        // see above.
        // this.horizontalFieldOfView =
        // this.verticalFieldOfView =
        // this.focalLength =
        // break;
        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
        }

}
项目:frc2017    文件SpatialAwarenessSubsystem.java   
// Constructs the vision/ranging subsystem: camera with fixed manual
// exposure, GRIP processing, Kalman-filtered ultrasonics, and the navX IMU.
public SpatialAwarenessSubsystem() {
  super("SpatialAwarenessSubsystem");

  // Fixed resolution/FPS and manual exposure so targets are detectable.
  cameraServer = CameraServer.getInstance();
  gearCamera = cameraServer.startAutomaticCapture(0);
  gearCamera.setResolution(IMG_WIDTH,IMG_HEIGHT);
  gearCamera.setFPS(30);
  gearCamera.setBrightness(7);
  gearCamera.setExposureManual(30);
  gearVideo = cameraServer.getVideo(gearCamera);

  visionProcessing = new VisionProcessing();

  leftUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_LEFT);
  rightUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_RIGHT);

  // Both filters use the same tuning; the right-hand constructor had lost
  // its 64d argument to the page scrape and is restored to match the left.
  leftUltrasonicKalman = new SimpleKalman(1.4d,64d,leftUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);
  rightUltrasonicKalman = new SimpleKalman(1.4d,64d,rightUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);

  navX = new AHRS(SPI.Port.kMXP);

  System.out.println("SpatialAwarenessSubsystem constructor finished");
}
项目:liastem    文件Robot.java   
// Teleop loop: streams camera frames to the dashboard with an oval drawn
// on each frame until operator control ends or the robot is disabled.
public void operatorControl() {
    NIVision.IMAQdxStartAcquisition(session);

    /**
     * grab an image,draw the circle,and provide it for the camera server
     * which will in turn send it to the dashboard.
     */
    NIVision.Rect rect = new NIVision.Rect(10,10,100,100);

    while (isOperatorControl() && isEnabled()) {

        NIVision.IMAQdxGrab(session,frame,1);
        // NOTE(review): "SHAPE_oval" / "setimage" casing looks mangled by
        // the page scrape (NIVision uses SHAPE_OVAL, CameraServer uses
        // setImage); confirm against the original project.
        NIVision.imaqDrawShapeOnImage(frame,rect,DrawMode.DRAW_VALUE,ShapeMode.SHAPE_oval,0.0f);

        CameraServer.getInstance().setimage(frame);

        /** robot code here! **/
        Timer.delay(0.005);     // wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
项目:liastem    文件Robot.java   
// First-start initialization: autonomous chooser, camera capture, and a
// GRIP vision thread that publishes the bounding box of the first contour.
@Override
public void robotinit() {
    RF = new RobotFunctions();
    chooser.addDefault("Default Auto",defaultAuto);
    chooser.addobject("My Auto",customAuto);
    SmartDashboard.putData("Auto modes",chooser);
      UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        camera.setResolution(IMG_WIDTH,IMG_HEIGHT);

        // Only read contour 0 when the pipeline actually produced contours.
        // The original guard was "pipeline.equals(null)", which is never
        // true for a live pipeline object (and would NPE if it were null),
        // so the tracking data was never updated; this matches the guard
        // used by the other VisionThread snippets in this project family.
        visionThread = new VisionThread(camera,new grip(),pipeline -> {
            if (!pipeline.filterContoursOutput().isEmpty()) {
                Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
                synchronized (imgLock) {
                    centerX = r.x + (r.width / 2);//Find the centre of the X Value
                    centerY = r.y + (r.height / 2);
                    rw = r.width;
                    rh = r.height;
                }
            }
        });
        visionThread.start();
}
项目:FRC-2016    文件DriveCam.java   
// NOTE(review): this snippet was damaged by the page scrape -- the block
// comment below lost its closing "*/" (swallowing text up to the
// "robot code here" comment) and the IMAQdxGrab call lost its frame
// argument. It will not compile as-is; recover the original from the
// project before use.
public void display() {
    NIVision.IMAQdxStartAcquisition(session);

    /**
     * grab an image,100);

    //while (teleop && enabled) {
    while(enabled) {
        NIVision.IMAQdxGrab(session,0.0f);

        CameraServer.getInstance().setimage(frame);

        /** robot code here! **/
        Timer.delay(0.005);     // wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
项目:2015-FRC-robot    文件CameraThread.java   
// Camera thread main loop: grabs frames while 'running' is set, publishes
// a tick counter, and pushes a color-thresholded frame to the dashboard.
@Override
public void run() {
    if (sessionStarted) {
        NIVision.IMAQdxStartAcquisition(session);
        running = true;
        while (running) {
            tick++;
            SmartDashboard.putNumber("CameraThread Tick",tick);

            NIVision.IMAQdxGrab(session,1);

            // NOTE(review): filteredFrame is null when passed to
            // imaqColorThreshold, and both that call and IMAQdxGrab are
            // missing arguments (lost in the page scrape). This method
            // cannot work as shown; restore it from the original project.
            Image filteredFrame = null;

            NIVision.imaqColorThreshold(filteredFrame,ColorMode.HSL,new Range(0,new Range(128,255));

            CameraServer.getInstance().setimage(filteredFrame);

            Timer.delay(0.01);
        }
        NIVision.IMAQdxStopAcquisition(session);
    }
    running = false;
}
项目:FRC6414program    文件Robot.java   
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
@Override
public void robotinit() {
    // Autonomous mode selection shown on the SmartDashboard.
    chooser.addDefault("gear",new HangGear());
    chooser.addobject("baseline",new BaseLine());
    SmartDashboard.putData("Auto",chooser);

    oi = new OI();

    // Two USB cameras streamed at 640x480 / 60 FPS.
    UsbCamera cam = CameraServer.getInstance().startAutomaticCapture(0);
    cam.setResolution(640,480);
    cam.setFPS(60);
    UsbCamera cam1 = CameraServer.getInstance().startAutomaticCapture(1);
    cam1.setResolution(640,480);
    cam1.setFPS(60);
    SmartDashboard.putString("Robot State:","started");
    System.out.println("Robot init");
    // Start the periodic monitor thread of each subsystem; setSleepTime
    // throttles how often the slower monitors run.
    chassis.startMonitor();
    intaker.startMonitor();

    shooter.setSleepTime(100);
    shooter.startMonitor();

    stirrer.setSleepTime(300);
    stirrer.startMonitor();

    uSensor.setSleepTime(100);
    uSensor.startMonitor();
}
项目:FRC-2017-Command    文件Vision.java   
// Starts camera capture and launches a GRIP-based vision thread that
// records the centre X of the first contour under imgLock.
public void setUpCamera(){
    // NOTE(review): startAutomaticCapture() is called twice; with the 2017
    // CameraServer each no-arg call claims the next USB device, so this
    // starts cam0 (handle discarded) and assigns cam1 to 'camera'.
    // Confirm this is intentional and not a scrape artifact.
    CameraServer.getInstance().startAutomaticCapture();
    camera = CameraServer.getInstance().startAutomaticCapture();
    //camera.setResolution(RobotMap.IMG_WIDTH,RobotMap.IMG_HEIGHT);
    //camera.setExposureManual(RobotMap.exposure);
    visionThread = new VisionThread(camera,new gripPipeline(),pipeline -> {
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            synchronized (imgLock) {
                centerX = r.x + (r.width / 2);
            }
        }
    });
}
项目:2017    文件VisionProcessor.java   
/**
 * Creates the object and starts the camera server
 * 
 * @param ip
 *            the IP of the axis camera (usually 10.3.39.11)
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (String ip, CameraModel camera)
{
    // NOTE(review): the parameter list and the start of the addAxisCamera
    // call were fused by the page scrape; reconstructed to match the
    // companion USB-port constructor from the same project.
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .addAxisCamera("Vision Camera", ip);

    // Based on the selected camera type, set the field of views and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel)
        {
        case AXIS_M1011:
            this.horizontalFieldOfView = M1011_HORIZ_FOV;
            this.verticalFieldOfView = M1011_VERT_FOV;
            break;
        case AXIS_M1013:
            this.horizontalFieldOfView = M1013_HORIZ_FOV;
            this.verticalFieldOfView = M1013_VERT_FOV;
            break;

        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
        }

}
项目:2017    文件VisionProcessor.java   
/**
 * The method that processes the image and inputs it into the particle reports
 */
public void processImage ()
{
    // Grab the latest frame from the "Vision Camera" stream. grabFrame
    // returns the frame timestamp; a return value of 0 signals an error
    // (the original comment here stated the inverse -- the code below is
    // what matches cscore's contract).
    long errorCode = CameraServer.getInstance()
            .getVideo("Vision Camera").grabFrame(image);

    if (image.empty())
        {
        System.out.println("Image is Empty! Unable to process image!");
        return;
        }

    if (errorCode == 0)
        {
        System.out.println(
                "There was an error grabbing the image. See below:");
        System.out.println(
                CameraServer.getInstance().getVideo().getError());
        }

    // The process image function found in the AutoGenVision class.
    super.process(image);
    // If this throws an error,make sure the grip project ends with a
    // filterContours function.
    this.createParticleReports(super.filterContoursOutput());
    // Sort the particles from largest to smallest
    Arrays.sort(particleReports);
    // for (int i = 0; i < particleReports.length; i++)
    // {
    // System.out.println(i + " " + particleReports[i].area);
    // }
}
项目:SteamWorks    文件Robot.java   
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotinit() {
RobotMap.init();
    // BEGIN AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();
    lift = new Lift();

// END AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    // OI must be constructed after subsystems. If the OI creates Commands
    //(which it very likely will),subsystems are not guaranteed to be
    // constructed yet. Thus,their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // instantiate the command used for the autonomous period
    // BEGIN AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    autonomousCommand = new AutonomousCommand();

// END AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    // Log every video source cscore already knows about before starting
    // our own captures.
    CameraServer cameraServer = CameraServer.getInstance();
    System.out.println("Camera sources:" + VideoSource.enumerateSources().length);
    for (VideoSource videoSource : VideoSource.enumerateSources()) {
        System.out.println("Camera: " + videoSource.getName());
    }

    UsbCamera  camera= cameraServer.startAutomaticCapture();
    System.out.println("Started camera capture.");
 // Hard coded camera address
    cameraServer.addAxisCamera("AxisCam ye","10.26.67.42");
  //  visionThread = new VisionThread(camera,new gripPipeline());

    // Drive-mode selector exposed on the dashboard; every DriveMode enum
    // value is added, with PWM as the default.
    driveTrainChooser = new SendableChooser();
    driveTrainChooser.addDefault("default PWM",DriveTrain.DriveMode.PWM);
    for (DriveTrain.DriveMode driveMode : DriveTrain.DriveMode.values()) {
        driveTrainChooser.addobject(driveMode.name(),driveMode);
    }
}
项目:SteamWorks    文件Robot.java   
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
// NOTE(review): the page scrape collapsed everything between the
// CONSTRUCTORS block and the end of the method into the fused comment
// below; the OI/autonomous/camera setup this method once contained is
// missing. Recover from the original project before use.
public void robotinit() {
RobotMap.init();
    // BEGIN AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();

// END AUTOGENERATED CODE,new gripPipeline());    
}
项目:FRC6706_JAVA2017    文件VisionSubsystem.java   
// NOTE(review): this snippet lost most of its body to the page scrape --
// the resolution call is fused with the tail of the render loop, and
// 'outputStream'/'mat' are used without the declarations that originally
// preceded them. The intact version of this method (VisionSubsystem2)
// appears earlier on this page.
public void initDefaultCommand() {
    visionThread = new Thread(() -> {
    // Get the UsbCamera from CameraServer
    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
    // Set the resolution
    camera.setResolution(640,5);
        // Give the output stream a new image to display
        outputStream.putFrame(mat);
    }
});
visionThread.setDaemon(true);
visionThread.start();
  }
项目:2017-emmet    文件Robot.java   
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotinit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    pDP = new PDP();
    intake = new Intake();
    climber = new climber();
    shooter = new Shooter();

// NOTE(review): the "END AUTOGENERATED" marker below was fused with the
// OI ordering comment by the page scrape; only comment text was lost.
// END AUTOGENERATED CODE,their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // initializes networktable
    table = NetworkTable.getTable("HookContoursReport");

    // sets up camera switcher
    server = CameraServer.getInstance();
    server.startAutomaticCapture(0);
    // cameraInit();

    // set up sendable chooser for autonomous
    autochooser = new SendableChooser();
    autochooser.addDefault("Middle Hook",new AUTOMiddleHook());
    autochooser.addobject("Left Hook",new AUTOLeftHook());
    autochooser.addobject("RightHook",new AUTORightHook());
    SmartDashboard.putData("Auto Chooser",autochooser);


    // instantiate the command used for the autonomous period

    // BEGIN AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=AUTONOMOUS


// END AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=AUTONOMOUS
}
项目:2017-emmet    文件Robot.java   
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
// @Override
public void robotinit() {

    System.out.println("1");

    RobotMap.init();

    // Subsystems are constructed before the OI so their commands resolve.
    drivetrain = new Drivetrain();
    climber = new climber();
    // fuel = new Fuel();
    gear = new Gear();

    oi = new OI();

    // initializes camera server
    server = CameraServer.getInstance();
    cameraInit();

    // initializes auto chooser
    autochooser = new SendableChooser();
    autochooser.addDefault("Middle Hook",new AutoMiddleHook());
    autochooser.addobject("Loading Station Hook",new AutoLeftHook());
    autochooser.addobject("Boiler Side Hook",new AutoRightHook());
    SmartDashboard.putData("Auto mode",autochooser);
    // auto delay
    SmartDashboard.putNumber("Auto delay",0);

    // resets all sensors
    resetAllSensors();
}
项目:PowerUp-2018    文件Vision.java   
/**
 * Start both the left and right camera streams and combine them into a single one which is then pushed
 * to an output stream titled Concat.
 * This method should only be used for starting the camera stream.
 * NOTE(review): the while-loop below runs until the thread is interrupted,
 * so this method is expected to be invoked on a dedicated thread.
 */
private void concatStreamStart() {
    cameraLeft = CameraServer.getInstance().startAutomaticCapture("Left",0);
    cameraRight = CameraServer.getInstance().startAutomaticCapture("Right",1);

    /// Dummy sinks to keep camera connections open.
    CvSink cvsinkLeft = new CvSink("leftSink");
    cvsinkLeft.setSource(cameraLeft);
    cvsinkLeft.setEnabled(true);
    CvSink cvsinkRight = new CvSink("rightSink");
    cvsinkRight.setSource(cameraRight);
    cvsinkRight.setEnabled(true);

    /// Matrices to store each image from the cameras.
    Mat leftSource = new Mat();
    Mat rightSource = new Mat();

    /// The ArrayList of left and right sources is needed for the hconcat method used to combine the streams
    ArrayList<Mat> sources = new ArrayList<>();
    sources.add(leftSource);
    sources.add(rightSource);

    /// Concatenation of both matrices
    Mat concat = new Mat();

    /// Puts the combined video on the SmartDashboard (I think)
    /// The width is multiplied by 2 as the dimensions of the stream will have a width two times that of a single webcam
    CvSource outputStream = CameraServer.getInstance().putVideo("Concat",2*Constants.CAM_WIDTH,Constants.CAM_HEIGHT);

    while (!Thread.interrupted()) {
        /// Provide each mat with the current frame
        cvsinkLeft.grabFrame(leftSource);
        cvsinkRight.grabFrame(rightSource);
        /// Combine the frames into a single mat in the Output and stream the image.
        Core.hconcat(sources,concat);
        outputStream.putFrame(concat);
    }
}
项目:2017TestBench    文件Robot.java   
/**
    * This function is run when the robot is first started up and should be
    * used for any initialization code.
    */
   //@SuppressWarnings("unused")
public void robotinit() {
   RobotMap.init();
       // BEGIN AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
       motors = new Motors();
       camera = new Camera();
       electrical = new Electrical();
       bling = new Bling();

       // END AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
       // OI must be constructed after subsystems. If the OI creates Commands
       //(which it very likely will),subsystems are not guaranteed to be
       // constructed yet. Thus,their requires() statements may grab null
       // pointers. Bad news. Don't move it.
       oi = new OI();

       // instantiate the command used for the autonomous period
       // BEGIN AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=AUTONOMOUS

       autonomousCommand = new AutonomousCommand();
       //LiveWindow liveWin = new LiveWindow();

       // END AUTOGENERATED CODE,SOURCE=ROBOTBUILDER ID=AUTONOMOUS 
       // NOTE(review): the pulse-width readings and sensor status below are
       // computed but never used in this method -- presumably kept for
       // inspection in a debugger; confirm before deleting.
       int pulseWidthPos = Motors.TestCANTalon.getpulseWidthPosition();
       int pulseWidthUs =  Motors.TestCANTalon.getpulseWidthRisetoFallUs();
       int periodUs =  Motors.TestCANTalon.getpulseWidthRisetoRiseUs();
       int pulseWidthVel = Motors.TestCANTalon.getpulseWidthVeLocity();
       // Check to see if encoder is plugged in
       FeedbackDeviceStatus sensorStatus = Motors.TestCANTalon.isSensorPresent(FeedbackDevice.CtreMagEncoder_Absolute);
       boolean sensorPluggedIn = (FeedbackDeviceStatus.FeedbackStatusPresent == sensorStatus);
       Motors.TestCANTalon.changeControlMode(TalonControlMode.PercentVbus);
      Motors.TestCANTalon.setFeedbackDevice(FeedbackDevice.CtreMagEncoder_Absolute);
       CameraServer.getInstance().startAutomaticCapture();
   }
项目:VikingRobot    文件Robot.java   
// First-start initialization: compressor, camera stream, autonomous
// selector, vision tracking, and (deliberately last) the operator
// interface.
@Override
public void robotinit() {
    compressor.start();
    CameraServer.getInstance().startAutomaticCapture(0);
    autoSelector = new SendableChooser<>();
    autoSelector.addDefault("Drive Forward",new DriveForward());
    autoSelector.addobject("Place Middle Gear",new MidGearauto());
    autoSelector.addobject("Place Right Gear <<< Feeling lucky?",new RightGearauto());
    autoSelector.addobject("Shoot on Blue Alliance",new BlueShootAuto());
    autoSelector.addobject("Shoot on Red Alliance",new RedShootAuto());
    SmartDashboard.putData("Autonomous Command",autoSelector);
    visiontracking = new VisionTracking();
    //created this last for ordering issues
    oi = new OperatorInterface();
}
项目:STEAMworks    文件VisionTest.java   
// NOTE(review): damaged by the page scrape -- the putVideo call below is
// fused with the start of a VisionThread lambda (putVideo's height
// argument and the "new VisionThread(camera, pipeline, ...)" construction
// are missing), and 'visionThread' is started without a visible
// assignment. Recover the original before use.
public Visiontest() {
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture(); // cam0 by default
        camera.setResolution(IMG_WIDTH,IMG_HEIGHT);
        camera.setBrightness(0);
//      camera.setExposureManual(100);
        camera.setExposureAuto();

        CvSource cs= CameraServer.getInstance().putVideo("name",IMG_WIDTH,pipeline -> {
            Mat IMG_MOD = pipeline.hslThresholdOutput();
            if (!pipeline.filterContoursOutput().isEmpty()) {
                //Rect recCombine = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
                Rect recCombine = computeBoundingRectangle(pipeline.filterContoursOutput());
                if (recCombine == null) {
                    targetDetected = false;
                    return;
                }
                targetDetected = true;
                computeCoords(recCombine);
                synchronized (imgLock) {
                    centerX = recCombine.x + (recCombine.width / 2);
                }

                Imgproc.rectangle(IMG_MOD,new Point(recCombine.x,recCombine.y),new Point(recCombine.x + recCombine.width,recCombine.y + recCombine.height),20,0),5);

            } else {
                targetDetected = false;
            }

            cs.putFrame(IMG_MOD);
        });

        visionThread.start();
        Relay relay = new Relay(RobotMap.RELAY_CHANNEL,Direction.kForward);
        relay.set(Relay.Value.kOn);
        //this.processImage();
    }
项目:R2017    文件CameraSet.java   
// Builds the front/back camera pair from their /dev paths and creates the
// shared output stream that the enable() loop copies frames into.
public CameraSet(Controls controls,String devpath1,String devpath2) {
        this.controls = controls;
        // NOTE(review): devpath2 feeds the camera labelled "Back" and
        // devpath1 the one labelled "Front" -- confirm the swap is intended.
        this.cam1 = CameraServer.getInstance().startAutomaticCapture("Back",devpath2);
        this.cam2 = CameraServer.getInstance().startAutomaticCapture("Front",devpath1);

//        cam1.setResolution((int) (this.multiplier * 160),(int) (this.multiplier * 120));
//        cam2.setResolution((int) (this.multiplier * 160),(int) (this.multiplier * 120));

        outputStream = CameraServer.getInstance().putVideo("camera_set",(int) (multiplier * 160),(int) (multiplier * 120));
        source = new Mat();

    }
项目:2017-Steamworks    文件Vision.java   
/**
 * Swaps the current camera streaming to grip / SmartDashboard with the one
 * not streaming (and not capturing)
 */
public void toggleVisionCamera() {
    // Route the server to whichever camera is currently inactive, then
    // flip the active-camera flag to match.
    CameraServer.getInstance().getServer()
            .setSource(isGearCamActive ? shooterCam : gearCam);
    isGearCamActive = !isGearCamActive;
}
项目:2016-Robot-Final    文件Robot.java   
// First-start initialization: subsystems, OI, motion profiles, autonomous
// chooser, camera stream, compressor, and navX.
public void robotinit() {

    // Create subsystems
    drive = new Drive();
    intake = new Intake();
    arm = new Arm();
    shooter = new Shooter();
    hanger = new Hanger();

    oi = new OI();

    // Create motion profiles
    crossLowBar = new Profile(AutoMap.crossLowBar);
    reach = new Profile(AutoMap.reach);
    toShoot = new Profile(AutoMap.toShoot);
    toLowGoal = new Profile(AutoMap.toLowGoal);

    // Pick an auto
    chooser = new SendableChooser();
    chooser.addDefault("Do nothing",new Donothing());
    chooser.addobject("Low Bar",new LowBarRawtonomous());
    chooser.addobject("Low Bar (profile)",new CrossLowBar(crossLowBar));
    chooser.addobject("Reach",new Reach(reach));
    chooser.addobject("Forward Rawto",new ForwardRawtonomous());
    chooser.addobject("Backward Rawto",new BackwardRawtonomous());
    chooser.addobject("Shoot",new AutoShoot());
    SmartDashboard.putData("Auto mode",chooser);

    // Start camera stream (pre-2017 USBCamera / setQuality API)
    camera = new USBCamera();
    server = CameraServer.getInstance();
    server.setQuality(40);
    server.startAutomaticCapture(camera);

    // Start compressor
    compressor = new Compressor();
    compressor.start();

    navx = new AHRS(SPI.Port.kMXP);
    }
项目:PCRobotClient    文件Robot.java   
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotinit() {
    // Stream "cam0" at quality 80, then run the PC-client server loop on
    // its own thread.
    server = CameraServer.getInstance();
    server.setQuality(80);
    //the camera name (ex "cam0") can be found through the roborio web interface
    server.startAutomaticCapture("cam0");
    serverThread = new Thread(new ServerLoop(this));
    serverThread.start();
}
项目:2016    文件VisionProcessor.java   
/**
 * Creates the object and starts the camera server
 * 
 * @param ip
 *            the IP of the .mjpg the axis camera outputs
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (String ip, CameraModel camera)
{
    // NOTE(review): the parameter list, opening brace, and addAxisCamera
    // call were fused into one line by the page scrape; reconstructed from
    // the 2017 variant of this constructor earlier on this page.
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .addAxisCamera("Vision Camera", ip);

    // Based on the selected camera type, set the field of views and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel)
        {
        case AXIS_M1011:
            this.horizontalFieldOfView = M1011_HORIZ_FOV;
            this.verticalFieldOfView = M1011_VERT_FOV;
            break;
        case AXIS_M1013:
            this.horizontalFieldOfView = M1013_HORIZ_FOV;
            this.verticalFieldOfView = M1013_VERT_FOV;
            break;

        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
        }

}
项目:2016    文件VisionProcessor.java   
/**
 * The method that processes the image and inputs it into the particle reports
 */
// NOTE(review): the page scrape removed the frame-grab and error-check
// section -- the first comment below is fused with a comment from later in
// the method, and 'image' is processed without being filled in here. See
// the 2017 version of processImage above for the intact structure.
public void processImage ()
{
    // Gets the error code while getting the new image from the camera.
    // If the error code is not 0,make sure the grip project ends with a
    // filterContours function.
    this.createParticleReports(super.filterContoursOutput());
    // Sort the particles from largest to smallest
    Arrays.sort(particleReports);
    // for (int i = 0; i < particleReports.length; i++)
    // {
    // System.out.println(i + " " + particleReports[i].area);
    // }
}
项目:2016-Robot    文件Robot.java   
// First-start initialization: subsystems, OI, motion profiles, autonomous
// chooser, camera stream, compressor, and navX.
public void robotinit() {

    // Create subsystems
    drive = new Drive();
    intake = new Intake();
    arm = new Arm();
    shooter = new Shooter();
    hanger = new Hanger();

    oi = new OI();

    // Create motion profiles
    crossLowBar = new Profile(AutoMap.crossLowBar);
    reach = new Profile(AutoMap.reach);
    toShoot = new Profile(AutoMap.toShoot);
    toLowGoal = new Profile(AutoMap.toLowGoal);

    // Pick an auto. NOTE(review): the page scrape had collapsed this
    // chooser setup into a single bogus addDefault line; restored from the
    // identical "2016-Robot-Final" version of this method on this page.
    chooser = new SendableChooser();
    chooser.addDefault("Do nothing",new Donothing());
    chooser.addobject("Low Bar",new LowBarRawtonomous());
    chooser.addobject("Low Bar (profile)",new CrossLowBar(crossLowBar));
    chooser.addobject("Reach",new Reach(reach));
    chooser.addobject("Forward Rawto",new ForwardRawtonomous());
    chooser.addobject("Backward Rawto",new BackwardRawtonomous());
    chooser.addobject("Shoot",new AutoShoot());
    SmartDashboard.putData("Auto mode",chooser);

    // Start camera stream
    camera = new USBCamera();
    server = CameraServer.getInstance();
    server.setQuality(40);
    server.startAutomaticCapture(camera);

    // Start compressor
    compressor = new Compressor();
    compressor.start();

    navx = new AHRS(SPI.Port.kMXP);
    }
项目:Stronghold    文件Robot.java   
// Streams the currently selected camera session to the dashboard and
// switches sessions when the dashboard selector changes.
public void updateCamera() {

    // Nothing to do until a frame buffer and an active session exist.
    if (frame == null || currSession == 0)
        return;

    String cameraSelected = (String) cameraSelector.getSelected();
    NIVision.IMAQdxGrab(currSession,1);
    CameraServer.getInstance().setimage(frame);

    // Only reconfigure when the selection actually changed.
    // NOTE(review): strings compared with == (reference equality); this
    // only works while both sides are the interned literals assigned below.
    if (cameraSelected == lastSelected) {
        return;
    }
    switch (cameraSelected) {
    case "Front View":
        NIVision.IMAQdxStopAcquisition(currSession);
        currSession = sessionfront;
        NIVision.IMAQdxconfigureGrab(currSession);
        Robot.drivetrain.ForwardDrive();
        lastSelected = "Front View";
        break;
    // 'default' deliberately shares the "Back View" handler: any unknown
    // selection falls back to the rear camera.
    default:
    case "Back View":
        NIVision.IMAQdxStopAcquisition(currSession);
        currSession = sessionback;
        NIVision.IMAQdxconfigureGrab(currSession);
        Robot.drivetrain.ReverseDrive();
        lastSelected = "Back View";
        break;
    case "Manual Change":
        break;
    }
}
项目:liastem    文件Robot.java   
// NOTE(review): damaged by the page scrape -- the chooser setup was fused
// onto one line (it registers the chooser as its own default option).
// Also suspect in the surviving code: 'Camera' (capital C) is not the
// local 'camera'; the guard tests isEmpty() where the parallel snippets
// test !isEmpty(); and boundingRect indexes findContoursOutput after
// checking filterContoursOutput. Recover the original before use.
@Override
public void robotinit() {
    chooser.addDefault("Default Auto",chooser);
    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
    camera.setResolution(IMG_WIDTH,IMG_HEIGHT);

    visionThread = new VisionThread(Camera,new mar4grip(),Pipeline -> { 
        if (Pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(Pipeline.findContoursOutput().get(0));
                centerX = r.x + (r.width / 2);
        }
    });
}
项目:liastem    文件FinalRobot.java   
// First-start initialization: autonomous chooser, camera capture, and a
// GRIP vision thread that records the centre X of the first contour.
@Override
public void robotinit() {
 chooser.addDefault("Default Auto",defaultAuto);
 chooser.addobject("My Auto",customAuto);
 SmartDashboard.putData("Auto modes",chooser);
 UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
 camera.setResolution(IMG_WIDTH,IMG_HEIGHT);

 // Fixes from review: the thread must wrap the local 'camera' (the
 // original referenced an undefined 'Camera'), and the list that was
 // null-checked (filterContoursOutput) is the one that must be indexed --
 // indexing findContoursOutput after checking a different list could
 // throw IndexOutOfBoundsException.
 visionThread = new VisionThread(camera,new grippipeline(),pipeline -> { 
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
                centerX = r.x + (r.width / 2);
        }
    });
}
项目:2016-Robot-Code    文件Robot.java   
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotinit() {
    // Clear any NetworkTable entries left over from a previous run.
    NetworkTable.globalDeleteall();

 oi = new OI();
 teleop = new Teleop();
 uHGSD = new UpdateHighGoalShooterDashboard();
 autonomousCommand = new Autonomous(2,2);
 CameraServer server = CameraServer.getInstance();
 server.startAutomaticCapture("cam0"); 

 // Re-zero the hood encoder against its known start position.
 hood.resetEncoder(hood.HOOD_START);
}
项目:Cogsworth    文件Cameras.java   
/**
 * Creates the default camera (cam0) and the cvSink
 */
public Cameras() {
    // Define Variables
    camNum = 0;
    lastSwitched = 0;
    source = new Mat();
    output = new Mat();

    // Setup default camera
    cam0 = new UsbCamera("cam0",0);
    cam0.setResolution(320,240);
    CameraServer.getInstance().addCamera(cam0);
    cvSink = CameraServer.getInstance().getVideo(cam0);
    outputStream = CameraServer.getInstance().putVideo("cams",320,240);
}
项目:1797-2017    文件RobotMap.java   
// Hardware map: constructs every sensor/actuator once at robot start.
public static void init() {

        // Drivetrain
        DRIVETRAIN_ROBOT_DRIVE = new RobotDrive(0,1);

        // NOTE(review): "setdistancePerpulse" casing looks scrape-mangled
        // (WPILib spells it setDistancePerPulse); confirm encoder channel
        // arguments against the original project as well.
        DRIVETRAIN_ENCODER_LEFT = new Encoder(0,1);
        DRIVETRAIN_ENCODER_LEFT.setdistancePerpulse(0.0481);

        DRIVETRAIN_ENCODER_RIGHT = new Encoder(2,3,true);
        DRIVETRAIN_ENCODER_RIGHT.setdistancePerpulse(0.0481);

        // Floor Gear
        FLOORGEAR_INTAKE = new VictorSP(2);
        FLOORGEAR_LIFTER = new DoubleSolenoid(0,1);
        FLOORGEAR_BLOCKER = new DoubleSolenoid(2,3);

        // climber
        climBER = new VictorSP(3);

        // Passive Gear
        SLOTGEAR_HOLDER = new DoubleSolenoid(4,5);

        // Vision
        VISION_SERVER = CameraServer.getInstance();

        VISION_CAMERA = VISION_SERVER.startAutomaticCapture("FRONT",0);

        // Manual exposure/gain values, presumably tuned for vision targets
        // -- TODO confirm against the original project.
        VISION_CAMERA.getProperty("saturation").set(20);
        VISION_CAMERA.getProperty("gain").set(50);
        VISION_CAMERA.getProperty("exposure_auto").set(1);
        VISION_CAMERA.getProperty("brightness").set(50);
        VISION_CAMERA.getProperty("exposure_absolute").set(1);
        VISION_CAMERA.getProperty("exposure_auto_priority").set(0);

    }
项目:FRCStronghold2016    文件Cameras.java   
// Opens the front camera via NIVision and marks its session as the
// current one for later frame grabs.
public Cameras() {
    server = CameraServer.getInstance();
    frame = NIVision.imaqCreateImage(NIVision.ImageType.IMAGE_RGB,0);
    // NOTE(review): "IMAQdxcameraControlMode" casing looks mangled by the
    // page scrape; confirm against the original project.
    sessionFront = NIVision.IMAQdxOpenCamera(RobotMap.FRONT_CAMERA,NIVision.IMAQdxcameraControlMode.CameraControlModeController);
    NIVision.IMAQdxconfigureGrab(sessionFront);
    currentSession = sessionFront;
    sessionSet = true;

}
项目:2015-beaker-Competition    文件Robot.java   
// Teleop loop body: run scheduled commands, publish pot readings, and push
// the latest camera frame to the dashboard.
public void teleopPeriodic() {
    Scheduler.getInstance().run();
    SmartDashboard.putNumber("Elevator Pot Value: ",Robot.elevator.getpotVal());
    SmartDashboard.putNumber("Arm Pot Value: ",Robot.arm.getpotVal());
    // NOTE(review): IMAQdxGrab appears to be missing its image argument
    // (lost to the page scrape; parallel snippets pass 'frame').
    NIVision.IMAQdxGrab(session,1);
    CameraServer.getInstance().setimage(frame);
}
项目:FRC1076-2015-Competition_Robot    文件Robot.java   
// Camera streaming loop: pushes a frame to the dashboard every 200 ms
// until the thread is interrupted, then stops acquisition.
public void run() {
    NIVision.IMAQdxStartAcquisition(session);
    // Exit cleanly on interruption instead of looping forever; the
    // original while(true) made IMAQdxStopAcquisition unreachable.
    while (!Thread.currentThread().isInterrupted()) {
        // NOTE(review): the scrape had dropped the image argument of
        // IMAQdxGrab; restored to match the identical call elsewhere on
        // this page.
        NIVision.IMAQdxGrab(session,frame,1);
        CameraServer.getInstance().setimage(frame);
        try {
            Thread.sleep(200);
        } catch (InterruptedException e) {
            // Re-assert the interrupt flag so the loop condition sees it,
            // rather than swallowing the interruption and printing a trace.
            Thread.currentThread().interrupt();
        }
    }
    NIVision.IMAQdxStopAcquisition(session);
}

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。