1
Python and Java API / Re: runTimeError: buildRoundTrip : traversal point not reachable
« on: July 12, 2022, 05:49:18 PM »
I will. Thanks a lot, as always
This section allows you to view all posts made by this member. Note that you can only see posts made in areas you currently have access to.
# Configure an automated flight plan (Metashape.Tasks.PlanMission) around the reference model.
plan_mission_task = Metashape.Tasks.PlanMission()
# mission parameters
plan_mission_task.sensor = 0 # choose the camera that was used for input photos (DJI for x2 test)
plan_mission_task.min_altitude = 5  # minimum flight altitude (presumably meters — TODO confirm units)
plan_mission_task.capture_distance = 30  # distance from the surface at which images are captured
plan_mission_task.horizontal_zigzags = True  # fly a horizontal zigzag (lawnmower) pattern
plan_mission_task.min_waypoint_spacing = 1  # minimum spacing between consecutive waypoints
plan_mission_task.overlap = overlap  # image overlap; `overlap` must be defined earlier in the script
plan_mission_task.attach_viewpoints = True  # keep the computed viewpoints attached to the plan
plan_mission_task.safety_distance = 10  # keep-out distance from the reconstructed surface
home = ref_model.shapes[0]  # the first shape in the chunk is used as the take-off/home point
plan_mission_task.home_point = home.key
runTimeError: buildRoundTrip : traversal point not reachable
Any idea what could cause it? position = Metashape.Vector((x, y, z)) # position vector X, Y, Z in chunk.crs 'WGS 84 + EGM96 height (EPSG::9707)'
orientation = Metashape.Vector((yaw, pitch, roll)) # orientation vector
# calculate relevant transform matrix t
position = ref_model.crs.unproject(position) # position in ECEF
# local ENU->ECEF rotation at `position`, composed with the yaw/pitch/roll rotation matrix
orientation = ref_model.crs.geogcs.localframe(position).rotation().t() * Metashape.Utils.ypr2mat(
orientation) # orientation matrix in ECEF
transform = Metashape.Matrix.Translation(position) * Metashape.Matrix.Rotation(orientation)
# into chunk-internal coordinates; Diag((1, -1, -1, 1)) flips Y/Z to the camera axis convention
transform = ref_model.transform.matrix.inv() * transform * Metashape.Matrix.Diag((1, -1, -1, 1))
cameraT = Metashape.Matrix.Translation(transform.translation()) * Metashape.Matrix.Rotation(
transform.rotation()) # 4x4 transform matrix 3 translations and 3 rotations
# Capture img to new folder
# render a synthetic image of the model from the virtual camera pose `cameraT`
image = ref_model.model.renderImage(cameraT, ref_model.sensors[0].calibration)
def pic_in_circle(center_x, center_y, radius, snapshots_number):
    '''
    Sample equally spaced points on a circle around the bbox center.

    :param center_x: X coordinate of the center of the bbox
    :param center_y: Y coordinate of the center of the bbox
    :param radius: distance between corners and center of the bbox
    :param snapshots_number: number of markers to create
    :return: df containing X and Y coordinates of the selected points, with
             length equal to snapshots_number
    '''
    # One candidate point per degree. math.cos/math.sin expect RADIANS, so the
    # degree index must be converted; passing the raw integer would scatter the
    # points around the circle instead of walking it in order.
    arr = []
    for deg in range(360):
        angle = math.radians(deg)
        x = center_x + radius * math.cos(angle)
        y = center_y + radius * math.sin(angle)
        # Create array with all the x-co and y-co of the circle
        arr.append([x, y])
    df = pd.DataFrame(arr, columns=['X', 'Y'])
    # Pick snapshots_number evenly spaced rows. The step is fractional in
    # general, so truncate to integer positions — df.iloc requires integer
    # indexes (float positions raise IndexError in current pandas).
    indexes = np.arange(0, len(df), step=(360 / snapshots_number)).astype(int)
    df = df.iloc[indexes]
    return df
# 50 evenly spaced snapshot positions on a circle of `radius` around the bbox center
circle = pic_in_circle(center.position[0], center.position[1], radius, 50)
The center of the bbox containing the reference model is Vector([4.521869862724644, 50.6898008733669, 4.897874583534293]). Sample of the resulting dataframe (index, X, Y): 0 38.008388 -199.830643 | 7 30.059443 -178.610007 | 14 10.125042 -167.834073 |
# Create one georeferenced marker per sampled circle point.
# (Loop-body indentation was lost in the paste; restored here.)
for index, row in circle.iterrows():
    # point in chunk-internal coordinates; Z = 0 keeps the marker on the circle plane
    pos = Metashape.Vector((row['X'], row['Y'], 0))
    circleM = ref_model.addMarker()
    circleM.label = "circle " + str(index + 1)
    # convert internal coordinates to the chunk CRS before storing the reference
    circleM.reference.location = ref_model.crs.project(ref_model.transform.matrix.mulp(pos))
    circleM.reference.enabled = True
    print(circleM.reference.location)
Vector([4.521858890254145, 50.68958594593813, 6.024067006404548]) Vector([4.5218386528562995, 50.68962537116644, 18.59051823922667]) Vector([4.521751947695833, 50.68971443077309, 25.186119392769566]) |
# NOTE(review): duplicate of the assignment inside the loop above — redundant as pasted
circleM.reference.location = ref_model.crs.project(ref_model.transform.matrix.mulp(pos))
# create function to transform quaternion into rotation matrix ---------------------------------------------------------
def euler_from_quaternion(x, y, z, w):
    """
    Convert a quaternion (x, y, z, w) into Euler angles in degrees.

    roll is the rotation around x, pitch around y, yaw around z (all
    counterclockwise). The returned tuple is reordered and sign-flipped to
    (-yaw, roll, -pitch) to match the angle convention the calling script
    needs (effectively alpha, nu, kappa).
    """
    rad2deg = 180 / math.pi
    # roll (x-axis rotation)
    roll_x = math.atan2(2.0 * (w * x + y * z), 1.0 - 2.0 * (x * x + y * y)) * rad2deg
    # pitch (y-axis rotation) — clamp so asin never sees a magnitude above 1
    sin_pitch = max(-1.0, min(1.0, 2.0 * (w * y - z * x)))
    pitch_y = math.asin(sin_pitch) * rad2deg
    # yaw (z-axis rotation)
    yaw_z = math.atan2(2.0 * (w * z + x * y), 1.0 - 2.0 * (y * y + z * z)) * rad2deg
    return -yaw_z, roll_x, -pitch_y
# Render a synthetic image of the reconstructed model from an explicit camera pose.
doc = Metashape.Document()
doc.open(path=r"C:\Users\Thomas\Documents\projet_x2.psx")
chunk = doc.chunk
# Camera pose in chunk.crs 'WGS 84 + EGM96 height (EPSG::9707)'.
position = Metashape.Vector((4.522160467, 50.68980499, 132.3963475))
# Yaw/pitch/roll derived from the recorded quaternion (qX, qY, qZ, qW).
orientation = Metashape.Vector(euler_from_quaternion(0.3773313583, 0.3340252627, 0.5725150359, 0.646741605))
# Build the 4x4 camera transform in chunk-internal coordinates.
position = chunk.crs.unproject(position)  # geographic -> ECEF
rotation_ecef = chunk.crs.geogcs.localframe(position).rotation().t() * Metashape.Utils.ypr2mat(orientation)
pose_ecef = Metashape.Matrix.Translation(position) * Metashape.Matrix.Rotation(rotation_ecef)
# Into internal coordinates; Diag((1, -1, -1, 1)) flips Y/Z to the camera axis convention.
pose_internal = chunk.transform.matrix.inv() * pose_ecef * Metashape.Matrix.Diag((1, -1, -1, 1))
cameraT = Metashape.Matrix.Translation(pose_internal.translation()) * Metashape.Matrix.Rotation(pose_internal.rotation())
# Render through the first sensor's calibration and save to disk.
image = chunk.model.renderImage(cameraT, chunk.sensors[0].calibration)
image.save(r"C:\Users\Thomas\Documents\render.jpg")
# Second render attempt: same pose as above, but with yaw/pitch/roll given
# directly as (276.698, 60.522, 0) instead of converting from the quaternion.
doc = Metashape.Document()
doc.open(path=r"C:\Users\Thomas\Documents\projet_x2.psx")
chunk = doc.chunk
position = Metashape.Vector((4.522160467, 50.68980499, 132.3963475)) # position vector X, Y, Z in chunk.crs 'WGS 84 + EGM96 height (EPSG::9707)'
orientation = Metashape.Vector((276.698,60.522,0)) # orientation vector Yaw, Pitch, Roll in chunk.crs
#calculate relevant transform matrix t
position = chunk.crs.unproject(position) # position in ECEF
# local ENU->ECEF rotation composed with the yaw/pitch/roll rotation matrix
orientation = chunk.crs.geogcs.localframe(position).rotation().t() * Metashape.Utils.ypr2mat(orientation) # orientation matrix in ECEF
# alternative kept for reference: build the rotation directly from the quaternion
#orientation = chunk.crs.geogcs.localframe(position).rotation().t() * quaternion_rotation_matrix(0.3773313583,0.3340252627,0.5725150359,0.646741605) # orientation matrix in ECEF
transform = Metashape.Matrix.Translation(position) * Metashape.Matrix.Rotation(orientation)
# Diag((1, -1, -1, 1)) flips Y/Z axes to match the camera coordinate convention
transform = chunk.transform.matrix.inv() * transform * Metashape.Matrix.Diag((1, -1, -1, 1))
cameraT = Metashape.Matrix.Translation(transform.translation()) * Metashape.Matrix.Rotation(transform.rotation()) # 4x4 transform matrix 3 translations and 3 rotations
image = chunk.model.renderImage(cameraT, chunk.sensors[0].calibration)
image.save(r"C:\Users\Thomas\Documents\render.jpg")
# Unfortunately, the .path file contains only quaternion info as qX, qY, qZ, qW.
# (The sentence above was fused onto the def line in the paste, making the
# block invalid Python; restored here as a comment with indentation fixed.)
def euler_from_quaternion(x, y, z, w):
    """
    Convert a quaternion into euler angles (roll, pitch, yaw)
    roll is rotation around x in radians (counterclockwise)
    pitch is rotation around y in radians (counterclockwise)
    yaw is rotation around z in radians (counterclockwise)

    Returns (-yaw, roll, pitch) in degrees, reordered/sign-flipped for the
    convention used by the calling script.
    """
    t0 = +2.0 * (w * x + y * z)
    t1 = +1.0 - 2.0 * (x * x + y * y)
    roll_x = math.atan2(t0, t1) * (180 / math.pi)
    t2 = +2.0 * (w * y - z * x)
    # clamp before asin to guard against numerical drift outside [-1, 1]
    t2 = +1.0 if t2 > +1.0 else t2
    t2 = -1.0 if t2 < -1.0 else t2
    pitch_y = math.asin(t2) * (180 / math.pi)
    t3 = +2.0 * (w * z + x * y)
    t4 = +1.0 - 2.0 * (y * y + z * z)
    yaw_z = math.atan2(t3, t4) * (180 / math.pi)
    return -yaw_z, roll_x, pitch_y  # in degrees

print(euler_from_quaternion(0.3773313583,0.3340252627,0.5725150359,0.646741605)) # quaternions corresponding to (276.698,60.522,0)
# Export the chunk's camera track to a .path file.
doc = Metashape.Document()
doc.open(path=r"C:\Users\Thomas\Documents\projet_x2.psx")
chunk = doc.chunk
# CameraTrack must be *instantiated*. The original `Metashape.CameraTrack`
# (no parentheses) bound `camtrack` to the class itself, so `camtrack.save(...)`
# invoked the unbound descriptor with the path string as `self`, producing
# "TypeError: descriptor 'save' ... doesn't apply to a 'str' object".
camtrack = Metashape.CameraTrack()
camtrack.chunk = chunk
camtrack.save(r"C:\Users\Thomas\Documents\CameraTrack.path")
and I always get the error message TypeError: descriptor 'save' for 'Metashape.Metashape.CameraTrack' objects doesn't apply to a 'str' object
that I can't solve. save(path[, file_format, max_waypoints, projection])
Save camera track to file.
Parameters
• path (string) – Path to camera track file
• file_format (string) – File format. “deduce”: Deduce from extension, “path”: Path,
“earth”: Google Earth KML, “pilot”: DJI Pilot KML, “trinity”: Asctec Trinity CSV,
“autopilot”: Asctec Autopilot CSV, “litchi”: Litchi CSV
• max_waypoints (int) – Max waypoints per flight
• projection (CoordinateSystem) – Camera track coordinate system.
# NOTE(review): load() fails the same way unless camtrack is an *instance*
# (Metashape.CameraTrack()), not the class object itself.
camtrack.load(r"C:\Users\Thomas\Documents\CameraTrack_001.path")
but i get the same error. self.chunk.exportModel(path=filename, binary=True,
save_texture=False, save_uv=False, save_normals=False, save_colors=False,
save_cameras=False, save_markers=False, save_udim=False, save_alpha=False,
save_comment=False,
format=Metashape.ModelFormatPLY)
else:
self.chunk.dense_cloud = None
for dense_cloud in self.chunk.dense_clouds:
if dense_cloud.key == key:
self.chunk.dense_cloud = dense_cloud
assert(self.chunk.dense_cloud is not None)
self.chunk.exportPoints(path=filename,
source_data=Metashape.DenseCloudData, binary=True,
save_normals=False, save_colors=False, save_classes=False, save_confidence=False,
save_comment=False,
format=Metashape.PointsFormatPLY)
v1 = read_ply(tmp1.name)
v2 = read_ply(tmp2.name)
Traceback (most recent call last):
File "C:/Users/Thomas/Desktop/pycharm/crash_code.py", line 532, in <module>
align_two_point_clouds(chunk.models[-1],chunk.models[1], scale_ratio = 0.99358)
File "C:/Users/Thomas/Desktop/pycharm/crash_code.py", line 27, in align_two_point_clouds
assert(isinstance(points1_source, np.ndarray) and isinstance(points2_target, np.ndarray))
AssertionError