diff --git a/.gitignore b/.gitignore
index c0c4e1d33d..a0f8ab8193 100644
--- a/.gitignore
+++ b/.gitignore
@@ -27,5 +27,6 @@ tests/_fake_trajs
 tests/_fake_results
 saved_dataset_behavior
 dataset_backup
+video_frames
 # Jetbrains IDEs
 .idea/
diff --git a/predicators/behavior_utils/behavior_utils.py b/predicators/behavior_utils/behavior_utils.py
index 585c4634d8..994a15e403 100644
--- a/predicators/behavior_utils/behavior_utils.py
+++ b/predicators/behavior_utils/behavior_utils.py
@@ -898,7 +898,7 @@ def get_relevant_scene_body_ids(
     return ids
 
 
-def detect_collision(bodyA: int, object_in_hand: Optional[int] = None) -> bool:
+def detect_collision(bodyA: int, ignore_objects: Optional[int] = None) -> bool:
     """Detects collisions between bodyA in the scene (except for the object in
     the robot's hand)"""
     if not isinstance(ignore_objects, list):
diff --git a/predicators/behavior_utils/motion_planner_fns.py b/predicators/behavior_utils/motion_planner_fns.py
index 02ef4901df..e8c85279b6 100644
--- a/predicators/behavior_utils/motion_planner_fns.py
+++ b/predicators/behavior_utils/motion_planner_fns.py
@@ -245,17 +245,10 @@ def make_grasp_plan(
     points = p.getClosestPoints(env.robots[0].get_body_id(), obj.get_body_id(), distance=10)#, linkIndexA=env.robots[0].grasp_point_joint_id)
     closest_point = min(points, key=lambda x:x[8])
     obj_pos = closest_point[6]
+
     x = obj_pos[0] + grasp_offset[0]
     y = obj_pos[1] + grasp_offset[1]
     z = obj_pos[2] + grasp_offset[2]
 
-    hand_x, hand_y, hand_z = env.robots[0].parts["right_hand"].get_position()
-    minx = min(x, hand_x) - 0.5
-    miny = min(y, hand_y) - 0.5
-    minz = min(z, hand_z) - 0.5
-    maxx = max(x, hand_x) + 0.5
-    maxy = max(y, hand_y) + 0.5
-    maxz = max(z, hand_z) + 0.5
-
     if isinstance(env.robots[0], BehaviorRobot):
         # compute the angle the hand must be in such that it can
@@ -267,6 +260,15 @@ def make_grasp_plan(
         # https://math.stackexchange.com/questions/180418/
         # calculate-rotation-matrix-to-align-vector-a-to-vector
         # -b-in-3d
+
+        hand_x, hand_y, hand_z = env.robots[0].parts["right_hand"].get_position()
+        minx = min(x, hand_x) - 0.5
+        miny = min(y, hand_y) - 0.5
+        minz = min(z, hand_z) - 0.5
+        maxx = max(x, hand_x) + 0.5
+        maxy = max(y, hand_y) + 0.5
+        maxz = max(z, hand_z) + 0.5
+
         hand_to_obj_vector = np.array(grasp_offset[:3])
         hand_to_obj_unit_vector = hand_to_obj_vector / \
             np.linalg.norm(
diff --git a/predicators/planning.py b/predicators/planning.py
index bd776e8295..2e70186cc6 100644
--- a/predicators/planning.py
+++ b/predicators/planning.py
@@ -524,7 +524,7 @@ def run_low_level_search(
         nsrt = skeleton[cur_idx]
         # Ground the NSRT's ParameterizedOption into an _Option.
         # This invokes the NSRT's sampler.
-        option = nsrt.sample_option(state, task.goal, rng_sampler)
+        option = nsrt.sample_option(state, task.goal, skeleton, rng_sampler)
         plan[cur_idx] = option
         # Increment num_samples metric by 1
         metrics["num_samples"] += 1
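
As a side illustration (a minimal sketch, not part of the patch): the `detect_collision` hunk renames `object_in_hand` to `ignore_objects`, and the surrounding `isinstance(ignore_objects, list)` check suggests the function now accepts either a single body id or a list of body ids to skip. The call sites below show how that might be used under this assumption; the body ids and the list-normalization behavior are illustrative guesses, not taken from this diff.

```python
# Illustrative only: assumes detect_collision() treats `ignore_objects` as
# either one pybullet body id or a list of ids to exclude from the check.
from predicators.behavior_utils.behavior_utils import detect_collision

robot_body_id = 1    # hypothetical pybullet body id for the robot
held_object_id = 42  # hypothetical id of the object currently in the hand
shelf_id = 7         # hypothetical id of a fixture we also want to ignore

# Previous-style call: a single id for the object in the hand.
colliding = detect_collision(robot_body_id, held_object_id)

# New-style call: several ignored bodies at once, passed as a list.
colliding = detect_collision(robot_body_id, [held_object_id, shelf_id])
```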