How to give camera position and camera focal point in UTM format in VTK

Hello Guys,

I am facing an issue where I want to give the latitude, longitude, and the camera's distance from the ground in UTM format, but my camera only works in a lat/long manner. My full code is attached further below; please help me find a solution.
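For reference, this is roughly what I mean by "UTM format": convert latitude/longitude to UTM easting/northing with GeographicLib and use those metre values (plus a height above the ground) directly as the camera's world coordinates. This is only a minimal sketch with placeholder values, not my actual code:

// Minimal sketch (placeholder values): lat/long -> UTM with GeographicLib,
// then use easting/northing (metres) directly as VTK world coordinates.
#include <GeographicLib/UTMUPS.hpp>
#include <vtk-9.1/vtkCamera.h>
#include <vtk-9.1/vtkNew.h>
#include <iostream>

int main() {
    double lat = 40.595, lon = -73.887;   // placeholder latitude/longitude
    double height = 150.0;                // camera height above the ground in metres

    int zone;
    bool northp;
    double easting, northing;
    GeographicLib::UTMUPS::Forward(lat, lon, zone, northp, easting, northing);
    std::cout << "Zone " << zone << ", Easting " << easting << ", Northing " << northing << std::endl;

    vtkNew<vtkCamera> camera;
    camera->SetFocalPoint(easting, northing, 0.0);    // point on the ground
    camera->SetPosition(easting, northing, height);   // camera directly above it
    return 0;
}

My actual code is below.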

#include <math.h>
#include <string.h>
#include <sys/time.h>
#include <cuda_runtime_api.h>
#include <tuple>
#include <GeographicLib/UTMUPS.hpp>
#include <opencv2/opencv.hpp>
#include <iostream>
#include <cmath>
#include <vector>
#include <random>
#include <string>
#include <map>
#include <gdal.h>
#include <gdal_priv.h> // For GDAL, used to read shapefiles and handle projections
#include <ogrsf_frmts.h>
#include "ogr_spatialref.h"
#include <ogr_api.h>
#include <ogr_geometry.h>
#include <ogr_feature.h>
#include <utility>
#include <vtk-9.1/vtkActor.h>
#include <vtk-9.1/vtkCamera.h>
#include <vtk-9.1/vtkCylinderSource.h>
#include <vtk-9.1/vtkNamedColors.h>
#include <vtk-9.1/vtkNew.h>
#include <vtk-9.1/vtkPolyDataMapper.h>
#include <vtk-9.1/vtkProperty.h>
#include <vtk-9.1/vtkRenderWindow.h>
#include <vtk-9.1/vtkRenderWindowInteractor.h>
#include <vtk-9.1/vtkRenderer.h>
#include <vtk-9.1/vtkPolyData.h> // For 3D plotting (alternative to PyVista)
#include <vtk-9.1/vtkSmartPointer.h>
#include <vtk-9.1/vtkArcPlotter.h> // For plotting
#include <vtk-9.1/vtkPoints.h>
#include <vtk-9.1/vtkCellArray.h>
#include <vtk-9.1/vtkCellArrayIterator.h>
#include <vtk-9.1/vtkFloatArray.h>
#include <vtk-9.1/vtkColor.h>
#include <vtk-9.1/vtkPNGWriter.h>
#include <vtk-9.1/vtkWindowToImageFilter.h>
#include <array>

OGRLayer* shapefile(const char* shapefilePath) {
    // Step 1: Open the shapefile
    GDALDataset* dataset = (GDALDataset*)GDALOpenEx(shapefilePath, GDAL_OF_VECTOR, nullptr, nullptr, nullptr);
    if (dataset == nullptr) {
        std::cerr << "Failed to open shapefile: " << shapefilePath << std::endl;
        return nullptr;
    }

    std::cout << "Shapefile opened successfully!" << std::endl;

    // Step 2: Get the layer from the shapefile
    OGRLayer* layer = dataset->GetLayer(0);
    if (layer == nullptr) {
        std::cerr << "Failed to get layer from shapefile." << std::endl;
        GDALClose(dataset);
        return nullptr;
    }

    // Step 3: Get the current spatial reference system of the layer
    OGRSpatialReference* sourceSRS = layer->GetSpatialRef();
    if (sourceSRS == nullptr) {
        std::cerr << "Shapefile does not have a spatial reference system." << std::endl;
        GDALClose(dataset);
        return nullptr;
    }

    char* sourceWKT = nullptr;
    sourceSRS->exportToWkt(&sourceWKT);
    std::cout << "Source CRS: " << sourceWKT << std::endl;
    CPLFree(sourceWKT);

    // Step 4: Define the target spatial reference system (EPSG:32617)
    OGRSpatialReference targetSRS;
    targetSRS.importFromEPSG(32617);

    // Step 5: Create a coordinate transformation
    OGRCoordinateTransformation* coordTransform = OGRCreateCoordinateTransformation(sourceSRS, &targetSRS);
    if (coordTransform == nullptr) {
        std::cerr << "Failed to create coordinate transformation." << std::endl;
        GDALClose(dataset);
        return nullptr;
    }

    std::cout << "Coordinate transformation created successfully to EPSG:32617." << std::endl;

    // Step 6: Iterate through the features and transform their geometries
    OGRFeature* feature;
    layer->ResetReading();
    while ((feature = layer->GetNextFeature()) != nullptr) {
        OGRGeometry* geometry = feature->GetGeometryRef();
        if (geometry == nullptr) {
            std::cerr << "Feature has no geometry." << std::endl;
            OGRFeature::DestroyFeature(feature);
            continue;
        }

        if (geometry->transform(coordTransform) != OGRERR_NONE) {
            std::cerr << "Failed to transform geometry." << std::endl;
        }

        // Optionally, do something with the transformed geometry

        OGRFeature::DestroyFeature(feature);
    }

    // Clean up
    OCTDestroyCoordinateTransformation(coordTransform);

    std::cout << "Shapefile reprojected to EPSG:32617." << std::endl;

    // Return the reprojected layer
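    // Note: the dataset is not closed on this path; the returned layer is owned
    // by the dataset, so closing it here would invalidate the layer.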
    return layer;
}

std::vector<vtkSmartPointer<vtkPolyData>> processGeometries(OGRLayer* transformedLayer, vtkRenderer* renderer, vtkRenderWindow* renderWindow) {
    // Check if the layer is valid
    if (transformedLayer == nullptr) {
        std::cerr << "Layer is empty." << std::endl;
        return {}; // Return empty vector if the layer is invalid
    }

    // Create a vector to store poly meshes
    std::vector<vtkSmartPointer<vtkPolyData>> polyMeshes;

    OGRFeature* poFeature;
    OGRGeometry* poGeometry;

    // Create a coordinate transformation for EPSG:4326 -> EPSG:32617
    OGRSpatialReference oSRS1, oSRS2;
    oSRS1.importFromEPSG(4326);   // Input CRS (WGS 84)
    oSRS2.importFromEPSG(32617);  // Target CRS (UTM Zone 17N)
    OGRCoordinateTransformation* poTransform = OGRCreateCoordinateTransformation(&oSRS1, &oSRS2);

    if (poTransform == nullptr) {
        std::cerr << "Failed to create coordinate transformation." << std::endl;
        return {};
    }

    // Iterate through each feature in the layer
    transformedLayer->ResetReading();
    int featureCount = 0;

    while ((poFeature = transformedLayer->GetNextFeature()) != nullptr) {
        featureCount++;
        poGeometry = poFeature->GetGeometryRef();

        if (poGeometry) {
            // Print geometry type name
            OGRwkbGeometryType geomType = poGeometry->getGeometryType();
            std::cout << "Processing Geometry Type: " << OGRGeometryTypeToName(geomType) << std::endl;

            // Apply CRS transformation to the geometry
            if (geomType == wkbPolygon || geomType == wkbMultiPolygon) {
                // Handle Polygon and MultiPolygon geometries separately
                OGRGeometry* transformedGeometry = poGeometry->clone(); // Clone to transform

                // Transform each point in the geometry
                OGRPolygon* poPolygon = nullptr;
                if (geomType == wkbPolygon) {
                    poPolygon = (OGRPolygon*)transformedGeometry;
                } else if (geomType == wkbMultiPolygon) {
                    OGRMultiPolygon* poMultiPolygon = (OGRMultiPolygon*)transformedGeometry;
                    for (int i = 0; i < poMultiPolygon->getNumGeometries(); i++) {
                        poPolygon = (OGRPolygon*)poMultiPolygon->getGeometryRef(i);
                    }
                }

                if (poPolygon == nullptr) {
                    // No polygon ring could be extracted from this feature; skip it
                    OGRFeature::DestroyFeature(poFeature);
                    continue;
                }

                OGRLinearRing* poRing = poPolygon->getExteriorRing();
                for (int i = 0; i < poRing->getNumPoints(); i++) {
                    OGRPoint point;
                    poRing->getPoint(i, &point);

                    double x = point.getX();
                    double y = point.getY();
                    double z = point.getZ();

                    // Transform the point coordinates
                    if (poTransform->Transform(1, &x, &y, &z)) {
                        poRing->setPoint(i, x, y);
                    } else {
                        std::cerr << "Coordinate transformation failed for point " << i << "." << std::endl;
                    }
                }

                // Proceed to handle transformed geometries after transformation
                vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
                for (int i = 0; i < poPolygon->getExteriorRing()->getNumPoints(); i++) {
                    OGRPoint point;
                    poPolygon->getExteriorRing()->getPoint(i, &point);
                    points->InsertNextPoint(point.getX(), point.getY(), 0.0);
                }

                vtkSmartPointer<vtkCellArray> cells = vtkSmartPointer<vtkCellArray>::New();
                vtkSmartPointer<vtkPolyData> polyMesh = vtkSmartPointer<vtkPolyData>::New();

                vtkIdType numPoints = points->GetNumberOfPoints();
                vtkIdType* ids = new vtkIdType[numPoints];
                for (vtkIdType i = 0; i < numPoints; i++) {
                    ids[i] = i;
                }
                cells->InsertNextCell(numPoints, ids);
                delete[] ids;

                polyMesh->SetPoints(points);
                polyMesh->SetPolys(cells);

                std::cout << "Polygon geometry with " << numPoints << " points." << std::endl;
                polyMeshes.push_back(polyMesh);
            }
            // Handle other geometry types (LineString, Point, etc.)
            else if (geomType == wkbLineString) {
                // Handle LineString geometry
                OGRLineString* poLineString = (OGRLineString*)poGeometry;
                vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
                for (int i = 0; i < poLineString->getNumPoints(); i++) {
                    OGRPoint point;
                    poLineString->getPoint(i, &point);
                    points->InsertNextPoint(point.getX(), point.getY(), 0.0);
                }

                vtkSmartPointer<vtkCellArray> cells = vtkSmartPointer<vtkCellArray>::New();
                vtkSmartPointer<vtkPolyData> polyMesh = vtkSmartPointer<vtkPolyData>::New();

                vtkIdType numPoints = points->GetNumberOfPoints();
                vtkIdType* ids = new vtkIdType[numPoints];
                for (vtkIdType i = 0; i < numPoints; i++) {
                    ids[i] = i;
                }
                cells->InsertNextCell(numPoints, ids);
                delete[] ids;

                polyMesh->SetPoints(points);
                polyMesh->SetLines(cells);

                std::cout << "LineString geometry with " << numPoints << " points." << std::endl;
                polyMeshes.push_back(polyMesh);
            }
            else if (geomType == wkbPoint) {
                // Handle Point geometry
                OGRPoint* poPoint = (OGRPoint*)poGeometry;
                vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
                points->InsertNextPoint(poPoint->getX(), poPoint->getY(), 0.0);

                vtkSmartPointer<vtkCellArray> cells = vtkSmartPointer<vtkCellArray>::New();
                vtkSmartPointer<vtkPolyData> polyMesh = vtkSmartPointer<vtkPolyData>::New();

                vtkIdType ids[1] = {0};
                cells->InsertNextCell(1, ids);

                polyMesh->SetPoints(points);
                polyMesh->SetVerts(cells);

                std::cout << "Point geometry." << std::endl;
                polyMeshes.push_back(polyMesh);
            }
        } else {
            std::cerr << "No geometry found for feature." << std::endl;
        }

        OGRFeature::DestroyFeature(poFeature);
    }

    std::cout << "Processed " << polyMeshes.size() << " geometries from " << featureCount << " features." << std::endl;

    // Debugging: Render and print information about the meshes
    vtkSmartPointer<vtkNamedColors> colors = vtkSmartPointer<vtkNamedColors>::New();
    for (auto& polyMesh : polyMeshes) {
        std::cout << "Processing mesh with number of points: " << polyMesh->GetPoints()->GetNumberOfPoints() << std::endl;

        if (polyMesh->GetPoints()->GetNumberOfPoints() == 0) {
            std::cerr << "Empty mesh found!" << std::endl;
            continue;
        }

        // Create mapper and actor for rendering
        vtkSmartPointer<vtkPolyDataMapper> mapper = vtkSmartPointer<vtkPolyDataMapper>::New();
        mapper->SetInputData(polyMesh);

        vtkSmartPointer<vtkActor> actor = vtkSmartPointer<vtkActor>::New();
        actor->SetMapper(mapper);
        actor->GetProperty()->SetColor(colors->GetColor3d("Red").GetData()); // Set color to red
        actor->GetProperty()->SetLineWidth(5.0); // Increase line thickness
        actor->GetProperty()->SetPointSize(8.0); // Increase point size for points
        actor->GetProperty()->SetOpacity(1.0); // Set opacity

        // Add the actor to the renderer
        renderer->AddActor(actor);
        std::cout << "Added actor for mesh with " << polyMesh->GetPoints()->GetNumberOfPoints() << " points." << std::endl;
    }

    // Set camera with azimuth and elevation
    vtkSmartPointer<vtkCamera> camera = renderer->GetActiveCamera();
    camera->SetPosition(594132.5056322503, 4493767.354823077, 150.01129150390625);
    camera->SetFocalPoint(594132.5056322503, 4493767.354823077, 0);
    camera->SetViewUp(0.0, 1.0, 0.0);
    camera->Azimuth(0);     // Set azimuth
    camera->Elevation(60);  // Set elevation
    camera->SetViewAngle(89.9);
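    // Note: ResetCamera() below repositions the camera so that all visible actors fit
    // in view, which replaces the position and focal point set above (only the view
    // direction is kept).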
    renderer->ResetCamera();
    std::cout << "Camera Position: "
          << camera->GetPosition()[0] << ", "
          << camera->GetPosition()[1] << ", "
          << camera->GetPosition()[2] << std::endl;

    std::cout << "Camera Focal Point: "
          << camera->GetFocalPoint()[0] << ", "
          << camera->GetFocalPoint()[1] << ", "
          << camera->GetFocalPoint()[2] << std::endl;

    std::cout << "Camera View Up: "
          << camera->GetViewUp()[0] << ", "
          << camera->GetViewUp()[1] << ", "
          << camera->GetViewUp()[2] << std::endl;

    std::cout << "Camera Distance second: "
          << camera->GetDistance() << std::endl;

    renderWindow->Render();
    return polyMeshes;
}

void visualize_scene(double latitude, double longitude, double yaw, double pitch, double distance, double hfov, double vfov) {
    // Check if the latitude and longitude are valid
    if (latitude < -90.0 || latitude > 90.0 || longitude < -180.0 || longitude > 180.0) {
        std::cerr << "Invalid latitude or longitude!" << std::endl;
        return;
    }

    int zone;
    bool northp; // Whether the location is in the northern hemisphere or not
    double easting, northing;

    // Convert latitude and longitude to UTM
    GeographicLib::UTMUPS::Forward(latitude, longitude, zone, northp, easting, northing);
    std::cout << "Using UTM zone: " << zone << ", Easting: " << easting << ", Northing: " << northing << std::endl;

    // Further processing: If the pitch is non-zero, calculate new coordinates
    double new_lat = latitude, new_lon = longitude;
    if (pitch != 0) {
        // Assuming calculate_new_location is defined elsewhere
        calculate_new_location(latitude, longitude, distance, yaw, pitch, hfov, vfov, new_lat, new_lon);
    }

    std::cout << "New Latitude: " << new_lat << ", New Longitude: " << new_lon << std::endl;

    // Convert new latitude and longitude to UTM
    GeographicLib::UTMUPS::Forward(new_lat, new_lon, zone, northp, easting, northing);
    std::cout << "New UTM - Easting: " << easting << ", Northing: " << northing << std::endl;

    // Assuming calculate_camera_position is defined elsewhere
    std::tuple<double, double, double> center = {easting, northing, 0.0};
    std::cout << "Center - Easting: " << std::get<0>(center) << ", Northing: " << std::get<1>(center) << std::endl;

    auto cam_position = calculate_camera_position(center, distance);
    std::cout << "Camera Position Calculated: (" << std::get<0>(cam_position) << ", "
              << std::get<1>(cam_position) << ", " << std::get<2>(cam_position) << ")" << std::endl;

    // Initialize GDAL and OGR
    GDALAllRegister();

    OGRLayer* transformedLayer = shapefile("road_shape/road_shape.shp");
    if (transformedLayer == nullptr) {
        std::cerr << "Failed to load shapefile!" << std::endl;
        return;
    }

    std::cout << "Successfully retrieved transformed layer." << std::endl;

    // Set up VTK to render the scene
    vtkSmartPointer<vtkNamedColors> colors = vtkSmartPointer<vtkNamedColors>::New();
    vtkSmartPointer<vtkRenderer> renderer = vtkSmartPointer<vtkRenderer>::New();
    renderer->SetBackground(colors->GetColor3d("SkyBlue").GetData());

    vtkSmartPointer<vtkRenderWindow> renderWindow = vtkSmartPointer<vtkRenderWindow>::New();
    renderWindow->AddRenderer(renderer);

    vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor = vtkSmartPointer<vtkRenderWindowInteractor>::New();
    renderWindowInteractor->SetRenderWindow(renderWindow);

    // Process geometries from the shapefile
    std::string outputImagePath = "transparent_colored_polygons_test.png";
    // Earlier attempt with an extended signature (kept for reference):
    // std::vector<vtkSmartPointer<vtkPolyData>> polyMeshes = processGeometries(
    //     transformedLayer, renderer, renderWindow, cam_position, easting, northing, yaw, pitch, hfov, outputImagePath);
    std::vector<vtkSmartPointer<vtkPolyData>> polyMeshes = processGeometries(transformedLayer, renderer, renderWindow);
}

Please concentrate on the part where this function is called; the problem is with this call:

std::vector<vtkSmartPointer<vtkPolyData>> polyMeshes =
    processGeometries(transformedLayer, renderer, renderWindow);
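To show the intent (the extended signature below is hypothetical, just an illustration): I would like to pass the UTM easting/northing and camera distance computed in visualize_scene() into processGeometries() instead of hard-coding them inside that function.

// Hypothetical extended signature: take the UTM camera values as parameters.
std::vector<vtkSmartPointer<vtkPolyData>> processGeometries(
    OGRLayer* layer, vtkRenderer* renderer, vtkRenderWindow* renderWindow,
    double easting, double northing, double distance);

// Call site inside visualize_scene():
std::vector<vtkSmartPointer<vtkPolyData>> polyMeshes =
    processGeometries(transformedLayer, renderer, renderWindow, easting, northing, distance);

// Inside processGeometries(), the camera would then use the passed-in values:
vtkCamera* camera = renderer->GetActiveCamera();
camera->SetFocalPoint(easting, northing, 0.0);
camera->SetPosition(easting, northing, distance);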

While Slicer can be used for some geoinformatics applications, most of the community experience is around biomedical imaging. You may find more geo experts on the VTK forum.