feat: initial meshing after edits #511

Open · wants to merge 7 commits into main
28 changes: 20 additions & 8 deletions pychunkedgraph/app/segmentation/common.py
@@ -189,11 +189,13 @@ def handle_find_minimal_covering_nodes(table_id, is_binary=True):
): # Process from higher layers to lower layers
if len(node_queue[layer]) == 0:
continue

current_nodes = list(node_queue[layer])

# Call handle_roots to find parents
parents = cg.get_roots(current_nodes, stop_layer=layer + 1, time_stamp=timestamp)
parents = cg.get_roots(
current_nodes, stop_layer=layer + 1, time_stamp=timestamp
)
unique_parents = np.unique(parents)
parent_layers = np.array(
[cg.get_chunk_layer(parent) for parent in unique_parents]
@@ -312,7 +314,11 @@ def str2bool(v):


def publish_edit(
table_id: str, user_id: str, result: GraphEditOperation.Result, is_priority=True
table_id: str,
user_id: str,
result: GraphEditOperation.Result,
is_priority: bool = True,
remesh: bool = True,
):
import pickle

@@ -322,6 +328,7 @@ def publish_edit(
"table_id": table_id,
"user_id": user_id,
"remesh_priority": "true" if is_priority else "false",
"remesh": "true" if remesh else "false",
}
payload = {
"operation_id": int(result.operation_id),
@@ -343,6 +350,7 @@ def handle_merge(table_id, allow_same_segment_merge=False):

nodes = json.loads(request.data)
is_priority = request.args.get("priority", True, type=str2bool)
remesh = request.args.get("remesh", True, type=str2bool)
chebyshev_distance = request.args.get("chebyshev_distance", 3, type=int)

current_app.logger.debug(nodes)
@@ -391,7 +399,7 @@ def handle_merge(table_id, allow_same_segment_merge=False):
current_app.logger.debug(("lvl2_nodes:", ret.new_lvl2_ids))

if len(ret.new_lvl2_ids) > 0:
publish_edit(table_id, user_id, ret, is_priority=is_priority)
publish_edit(table_id, user_id, ret, is_priority=is_priority, remesh=remesh)

return ret

@@ -405,6 +413,7 @@ def handle_split(table_id):

data = json.loads(request.data)
is_priority = request.args.get("priority", True, type=str2bool)
remesh = request.args.get("remesh", True, type=str2bool)
mincut = request.args.get("mincut", True, type=str2bool)

current_app.logger.debug(data)
@@ -457,7 +466,7 @@ def handle_split(table_id):
current_app.logger.debug(("lvl2_nodes:", ret.new_lvl2_ids))

if len(ret.new_lvl2_ids) > 0:
publish_edit(table_id, user_id, ret, is_priority=is_priority)
publish_edit(table_id, user_id, ret, is_priority=is_priority, remesh=remesh)

return ret

@@ -470,6 +479,7 @@ def handle_undo(table_id):

data = json.loads(request.data)
is_priority = request.args.get("priority", True, type=str2bool)
remesh = request.args.get("remesh", True, type=str2bool)
user_id = str(g.auth_user.get("id", current_app.user_id))

current_app.logger.debug(data)
@@ -489,7 +499,7 @@ def handle_undo(table_id):
current_app.logger.debug(("lvl2_nodes:", ret.new_lvl2_ids))

if ret.new_lvl2_ids.size > 0:
publish_edit(table_id, user_id, ret, is_priority=is_priority)
publish_edit(table_id, user_id, ret, is_priority=is_priority, remesh=remesh)

return ret

@@ -502,6 +512,7 @@ def handle_redo(table_id):

data = json.loads(request.data)
is_priority = request.args.get("priority", True, type=str2bool)
remesh = request.args.get("remesh", True, type=str2bool)
user_id = str(g.auth_user.get("id", current_app.user_id))

current_app.logger.debug(data)
@@ -521,7 +532,7 @@ def handle_redo(table_id):
current_app.logger.debug(("lvl2_nodes:", ret.new_lvl2_ids))

if ret.new_lvl2_ids.size > 0:
publish_edit(table_id, user_id, ret, is_priority=is_priority)
publish_edit(table_id, user_id, ret, is_priority=is_priority, remesh=remesh)

return ret

@@ -536,6 +547,7 @@ def handle_rollback(table_id):
target_user_id = request.args["user_id"]

is_priority = request.args.get("priority", True, type=str2bool)
remesh = request.args.get("remesh", True, type=str2bool)
skip_operation_ids = np.array(
json.loads(request.args.get("skip_operation_ids", "[]")), dtype=np.uint64
)
@@ -562,7 +574,7 @@ def handle_rollback(table_id):
raise cg_exceptions.BadRequest(str(e))

if ret.new_lvl2_ids.size > 0:
publish_edit(table_id, user_id, ret, is_priority=is_priority)
publish_edit(table_id, user_id, ret, is_priority=is_priority, remesh=remesh)

return user_operations

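Taken together, the common.py changes thread a new `remesh` flag from the request query string through `publish_edit`, so a client can ask that the initial meshing pass be skipped for a merge, split, undo, redo, or rollback. Below is a minimal client-side sketch; the host, auth header, API prefix, and node payload values are illustrative assumptions — only the `remesh=false` query parameter is introduced by this PR.

```python
# Minimal client-side sketch (not part of this PR's code): disable the initial
# meshing pass when merging two supervoxels. Host, auth header, API prefix, and
# the node payload values are hypothetical placeholders; only the new "remesh"
# query parameter comes from this change.
import requests

SERVER = "https://pcg.example.com"            # hypothetical host
TABLE_ID = "my_table"                          # hypothetical chunkedgraph table
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical auth header

# Each entry is [supervoxel_or_root_id, x, y, z], mirroring the payload the
# merge handler parses from request.data (values below are made up).
nodes = [
    [89622628397744891, 108730, 67800, 21560],
    [89622628397744892, 108740, 67810, 21560],
]

resp = requests.post(
    f"{SERVER}/segmentation/api/v1/table/{TABLE_ID}/merge",
    json=nodes,
    params={"priority": "true", "remesh": "false"},  # skip initial meshing
    headers=HEADERS,
)
resp.raise_for_status()
```

Note that `publish_edit` is still invoked either way; the flag simply travels as a message attribute (`"remesh": "true"/"false"`) alongside `remesh_priority`, presumably so the downstream meshing worker can decide whether to mesh the edit's new level-2 IDs.
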
19 changes: 19 additions & 0 deletions pychunkedgraph/app/segmentation/v1/routes.py
@@ -15,6 +15,7 @@
)

from pychunkedgraph.app import common as app_common
from pychunkedgraph.app import app_utils
from pychunkedgraph.app.app_utils import (
jsonify_with_kwargs,
remap_public,
@@ -626,3 +627,21 @@ def valid_nodes(table_id):
resp = common.valid_nodes(table_id, is_binary=is_binary)

return jsonify_with_kwargs(resp, int64_as_str=int64_as_str)


@bp.route("/table/<table_id>/supervoxel_lookup", methods=["POST"])
@auth_requires_permission("view")
@remap_public(edit=False)
def handle_supervoxel_lookup(table_id):
int64_as_str = request.args.get("int64_as_str", default=False, type=toboolean)

nodes = json.loads(request.data)
cg = app_utils.get_cg(table_id)
node_ids = []
coords = []
for node in nodes:
node_ids.append(node[0])
coords.append(np.array(node[1:]) / cg.segmentation_resolution)

atomic_ids = app_utils.handle_supervoxel_id_lookup(cg, coords, node_ids)
return jsonify_with_kwargs(atomic_ids, int64_as_str=int64_as_str)
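
The routes.py change registers a new `/table/<table_id>/supervoxel_lookup` POST route that maps world coordinates to atomic (supervoxel) IDs via `app_utils.handle_supervoxel_id_lookup`. A hedged usage sketch follows; the host, auth header, and API prefix are placeholders, and the coordinate unit (presumably nanometers) is inferred from the division by `cg.segmentation_resolution` in the handler above.

```python
# Hedged usage sketch for the new supervoxel_lookup endpoint (host, auth header,
# and API prefix are hypothetical placeholders; ids/coordinates are made up).
import requests

SERVER = "https://pcg.example.com"            # hypothetical host
TABLE_ID = "my_table"                          # hypothetical chunkedgraph table
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical auth header

# Each entry is [node_id, x, y, z]; the handler divides the coordinates by
# cg.segmentation_resolution, so they are expected in world units (nanometers).
nodes = [
    [648518346349538235, 108730, 67800, 21560],
    [648518346349538236, 52120, 81200, 18040],
]

resp = requests.post(
    f"{SERVER}/segmentation/api/v1/table/{TABLE_ID}/supervoxel_lookup",
    json=nodes,
    params={"int64_as_str": "true"},  # avoid precision loss in JSON clients
    headers=HEADERS,
)
resp.raise_for_status()
supervoxel_ids = resp.json()  # one atomic (supervoxel) id per input node
```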