From 254e3e76611382268d266fadf0fad36cc5ae9194 Mon Sep 17 00:00:00 2001
From: Ratheesh kumar R <ratheesh.kumar@aot-technologies.com>
Date: Mon, 15 Jul 2024 16:34:17 -0700
Subject: [PATCH 01/10] Resolving multiple migration heads

---
 spiffworkflow-backend/migrations/versions/c8f64c8333d2_.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/spiffworkflow-backend/migrations/versions/c8f64c8333d2_.py b/spiffworkflow-backend/migrations/versions/c8f64c8333d2_.py
index a70cc4ddc..d242f59c8 100644
--- a/spiffworkflow-backend/migrations/versions/c8f64c8333d2_.py
+++ b/spiffworkflow-backend/migrations/versions/c8f64c8333d2_.py
@@ -1,7 +1,7 @@
 """empty message
 
 Revision ID: c8f64c8333d2
-Revises: d4b900e71852
+Revises: ffef09e6ddf1
 Create Date: 2024-06-14 16:41:02.361125
 
 """
@@ -11,7 +11,7 @@
 
 # revision identifiers, used by Alembic.
 revision = 'c8f64c8333d2'
-down_revision = 'd4b900e71852'
+down_revision = 'ffef09e6ddf1'
 branch_labels = None
 depends_on = None
 

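The change above linearizes the history by re-pointing c8f64c8333d2 at ffef09e6ddf1. An alternative way to resolve multiple Alembic heads is a merge revision whose down_revision names both heads and which carries no schema changes. The sketch below is only illustrative (the merge revision id and date are invented); either approach leaves `alembic heads` / `flask db heads` reporting a single head.

"""Merge the two migration heads.

Revision ID: 000merge000
Revises: d4b900e71852, ffef09e6ddf1
Create Date: 2024-07-15 00:00:00.000000

"""
from alembic import op  # noqa: F401
import sqlalchemy as sa  # noqa: F401

# revision identifiers, used by Alembic.
revision = '000merge000'
# A tuple of down revisions joins the two branches into one head.
down_revision = ('d4b900e71852', 'ffef09e6ddf1')
branch_labels = None
depends_on = None


def upgrade():
    # Merge revisions carry no schema changes; they only join the heads.
    pass


def downgrade():
    pass
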
From 678cc4fcd13ec83de264717a28ba980786a7dc4b Mon Sep 17 00:00:00 2001
From: Ratheesh kumar R <ratheesh.kumar@aot-technologies.com>
Date: Wed, 17 Jul 2024 13:10:16 -0700
Subject: [PATCH 02/10] Converting the Spiff frontend to a micro frontend

---
 spiffworkflow-frontend/package-lock.json      |  61 +++++++
 spiffworkflow-frontend/package.json           |   7 +-
 spiffworkflow-frontend/src/AppSpa.tsx         |  55 ++++++
 .../src/ContainerForExtensionsMicro.tsx       | 161 ++++++++++++++++++
 .../src/components/LoginHandler.tsx           |   3 +-
 spiffworkflow-frontend/src/config.tsx         |  10 ++
 spiffworkflow-frontend/src/index.css          |   8 +
 .../src/routes/BaseRoutesMicro.tsx            |  89 ++++++++++
 .../src/routes/HelloWorld.tsx                 |   8 +
 .../src/services/HttpService.ts               |   5 +-
 .../src/services/UserService.ts               |  18 +-
 spiffworkflow-frontend/src/spa.tsx            |  30 ++++
 spiffworkflow-frontend/vite.micro.config.ts   |  36 ++++
 13 files changed, 483 insertions(+), 8 deletions(-)
 create mode 100644 spiffworkflow-frontend/src/AppSpa.tsx
 create mode 100644 spiffworkflow-frontend/src/ContainerForExtensionsMicro.tsx
 create mode 100644 spiffworkflow-frontend/src/routes/BaseRoutesMicro.tsx
 create mode 100644 spiffworkflow-frontend/src/routes/HelloWorld.tsx
 create mode 100644 spiffworkflow-frontend/src/spa.tsx
 create mode 100644 spiffworkflow-frontend/vite.micro.config.ts

diff --git a/spiffworkflow-frontend/package-lock.json b/spiffworkflow-frontend/package-lock.json
index 303a478a1..1543c326d 100644
--- a/spiffworkflow-frontend/package-lock.json
+++ b/spiffworkflow-frontend/package-lock.json
@@ -100,8 +100,10 @@
         "nice-select2": "^2.1.0",
         "prettier": "^3.3.2",
         "safe-regex": "^2.1.1",
+        "single-spa-react": "^6.0.1",
         "tiny-svg": "^2.2.3",
         "ts-migrate": "^0.1.30",
+        "vite-plugin-single-spa": "^0.7.0",
         "vitest": "^1.5.0"
       }
     },
@@ -8072,6 +8074,12 @@
         "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
       }
     },
+    "node_modules/browserslist-config-single-spa": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/browserslist-config-single-spa/-/browserslist-config-single-spa-1.0.1.tgz",
+      "integrity": "sha512-nqOxTbatv6FcdgBvUTuH4MuojMZwvskspz5Y4dmpVcKd0uaQY8KEl3iALWus16+AwPVe3BIerBNEgELyaHZcQg==",
+      "dev": true
+    },
     "node_modules/bser": {
       "version": "2.1.1",
       "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz",
@@ -22312,6 +22320,28 @@
       "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
       "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="
     },
+    "node_modules/single-spa-react": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/single-spa-react/-/single-spa-react-6.0.1.tgz",
+      "integrity": "sha512-kRFQN0uAibFV7QrJCG1pau7pixPgToQA4f/Pcn2Ojfs3ETbmXhaGkViSE1KIH0ZsxUu4JcaE2ArNMn5iK8srqA==",
+      "dev": true,
+      "dependencies": {
+        "browserslist-config-single-spa": "^1.0.1"
+      },
+      "peerDependencies": {
+        "@types/react": "*",
+        "@types/react-dom": "*",
+        "react": "*"
+      },
+      "peerDependenciesMeta": {
+        "@types/react": {
+          "optional": true
+        },
+        "@types/react-dom": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/sisteransi": {
       "version": "1.0.5",
       "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
@@ -25442,6 +25472,15 @@
         "url": "https://opencollective.com/vitest"
       }
     },
+    "node_modules/vite-plugin-single-spa": {
+      "version": "0.7.0",
+      "resolved": "https://registry.npmjs.org/vite-plugin-single-spa/-/vite-plugin-single-spa-0.7.0.tgz",
+      "integrity": "sha512-oM0J396iFfFx/PNwqEcvJUfGRORvVOsi21zOkSifMyMtLNfbeT8z5ZLkwkkCRdyUsXreHSmAu7JtRIYWlznyIQ==",
+      "dev": true,
+      "peerDependencies": {
+        "vite": "^4.4.6 || ^5.0.0"
+      }
+    },
     "node_modules/vite-tsconfig-paths": {
       "version": "4.3.2",
       "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-4.3.2.tgz",
@@ -31817,6 +31856,12 @@
         "update-browserslist-db": "^1.0.13"
       }
     },
+    "browserslist-config-single-spa": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/browserslist-config-single-spa/-/browserslist-config-single-spa-1.0.1.tgz",
+      "integrity": "sha512-nqOxTbatv6FcdgBvUTuH4MuojMZwvskspz5Y4dmpVcKd0uaQY8KEl3iALWus16+AwPVe3BIerBNEgELyaHZcQg==",
+      "dev": true
+    },
     "bser": {
       "version": "2.1.1",
       "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz",
@@ -42431,6 +42476,15 @@
         }
       }
     },
+    "single-spa-react": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/single-spa-react/-/single-spa-react-6.0.1.tgz",
+      "integrity": "sha512-kRFQN0uAibFV7QrJCG1pau7pixPgToQA4f/Pcn2Ojfs3ETbmXhaGkViSE1KIH0ZsxUu4JcaE2ArNMn5iK8srqA==",
+      "dev": true,
+      "requires": {
+        "browserslist-config-single-spa": "^1.0.1"
+      }
+    },
     "sisteransi": {
       "version": "1.0.5",
       "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
@@ -44869,6 +44923,13 @@
         "vite": "^5.0.0"
       }
     },
+    "vite-plugin-single-spa": {
+      "version": "0.7.0",
+      "resolved": "https://registry.npmjs.org/vite-plugin-single-spa/-/vite-plugin-single-spa-0.7.0.tgz",
+      "integrity": "sha512-oM0J396iFfFx/PNwqEcvJUfGRORvVOsi21zOkSifMyMtLNfbeT8z5ZLkwkkCRdyUsXreHSmAu7JtRIYWlznyIQ==",
+      "dev": true,
+      "requires": {}
+    },
     "vite-tsconfig-paths": {
       "version": "4.3.2",
       "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-4.3.2.tgz",
diff --git a/spiffworkflow-frontend/package.json b/spiffworkflow-frontend/package.json
index cb4e09f68..8feb40c89 100644
--- a/spiffworkflow-frontend/package.json
+++ b/spiffworkflow-frontend/package.json
@@ -71,12 +71,13 @@
   },
   "scripts": {
     "build": "vite build",
+    "build-micro": "vite build --base=/spliff --config vite.micro.config.ts --mode production",
     "eslint": "./node_modules/.bin/eslint src --ext .js,.jsx,.ts,.tsx",
     "format": "prettier --write src/**/*.[tj]s{,x}",
     "lint": "npm run eslint && npm run typecheck",
     "lint:fix": "./node_modules/.bin/eslint --fix src --ext .js,.jsx,.ts,.tsx",
     "serve": "vite preview",
-    "start": "VITE_VERSION_INFO='{\"version\":\"local\"}' vite",
+    "start": "vite",
     "test": "vitest run --coverage",
     "typecheck": "./node_modules/.bin/tsc --noEmit"
   },
@@ -114,6 +115,7 @@
     "cypress-slow-down": "^1.3.1",
     "cypress-vite": "^1.5.0",
     "eslint": "^8.56.0",
+    "eslint_d": "^12.2.0",
     "eslint-config-airbnb": "^19.0.4",
     "eslint-config-prettier": "^9.1.0",
     "eslint-plugin-cypress": "^3.3.0",
@@ -124,14 +126,15 @@
     "eslint-plugin-react-hooks": "^4.6.2",
     "eslint-plugin-sonarjs": "^1.0.3",
     "eslint-plugin-unused-imports": "^3.2.0",
-    "eslint_d": "^12.2.0",
     "inherits-browser": "^0.0.1",
     "jsdom": "^24.0.0",
     "nice-select2": "^2.1.0",
     "prettier": "^3.3.2",
     "safe-regex": "^2.1.1",
+    "single-spa-react": "^6.0.1",
     "tiny-svg": "^2.2.3",
     "ts-migrate": "^0.1.30",
+    "vite-plugin-single-spa": "^0.7.0",
     "vitest": "^1.5.0"
   }
 }
diff --git a/spiffworkflow-frontend/src/AppSpa.tsx b/spiffworkflow-frontend/src/AppSpa.tsx
new file mode 100644
index 000000000..cc67489fc
--- /dev/null
+++ b/spiffworkflow-frontend/src/AppSpa.tsx
@@ -0,0 +1,55 @@
+import { defineAbility } from '@casl/ability';
+
+import { createBrowserRouter, Outlet, RouterProvider } from 'react-router-dom';
+import { ReactQueryDevtools } from '@tanstack/react-query-devtools';
+import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
+import { AbilityContext } from './contexts/Can';
+import APIErrorProvider from './contexts/APIErrorContext';
+import ContainerForExtensionsMicro from './ContainerForExtensionsMicro';
+import { BASENAME_URL } from './config';
+
+const queryClient = new QueryClient();
+
+export default function AppSpa() {
+  const ability = defineAbility(() => {});
+  const routeComponents = () => {
+    return [
+      {
+        path: '*',
+        element: <ContainerForExtensionsMicro />,
+      },
+    ];
+  };
+
+  /**
+   * Note that QueryClientProvider and ReactQueryDevtools
+   * are part of React Query, now branded under the TanStack packages.
+   * https://tanstack.com/query/latest
+   */
+  const layout = () => {
+    return (
+      <div className="cds--white">
+        <QueryClientProvider client={queryClient}>
+          <APIErrorProvider>
+            <AbilityContext.Provider value={ability}>
+              <Outlet />
+              <ReactQueryDevtools initialIsOpen={false} />
+            </AbilityContext.Provider>
+          </APIErrorProvider>
+        </QueryClientProvider>
+      </div>
+    );
+  };
+  const router = createBrowserRouter([
+    {
+      path: '*',
+      Component: layout,
+      children: routeComponents(),
+    },
+  ],
+  {
+    basename: BASENAME_URL
+  }
+);
+  return <RouterProvider router={router} />;
+}
diff --git a/spiffworkflow-frontend/src/ContainerForExtensionsMicro.tsx b/spiffworkflow-frontend/src/ContainerForExtensionsMicro.tsx
new file mode 100644
index 000000000..14533d389
--- /dev/null
+++ b/spiffworkflow-frontend/src/ContainerForExtensionsMicro.tsx
@@ -0,0 +1,161 @@
+import { Content } from '@carbon/react';
+import { Routes, Route, useLocation } from 'react-router-dom';
+import React, { useEffect, useState } from 'react';
+import { ErrorBoundary } from 'react-error-boundary';
+
+import ScrollToTop from './components/ScrollToTop';
+import EditorRoutes from './routes/EditorRoutes';
+import Extension from './routes/Extension';
+import { useUriListForPermissions } from './hooks/UriListForPermissions';
+import { PermissionsToCheck, ProcessFile, ProcessModel } from './interfaces';
+import { usePermissionFetcher } from './hooks/PermissionService';
+import {
+  ExtensionUiSchema,
+  UiSchemaUxElement,
+} from './extension_ui_schema_interfaces';
+import HttpService from './services/HttpService';
+import { ErrorBoundaryFallback } from './ErrorBoundaryFallack';
+import BaseRoutes from './routes/BaseRoutes';
+import BaseRoutesMicro from './routes/BaseRoutesMicro';
+import BackendIsDown from './routes/BackendIsDown';
+import Login from './routes/Login';
+import NavigationBar from './components/NavigationBar';
+import useAPIError from './hooks/UseApiError';
+
+export default function ContainerForExtensionsMicro() {
+  const [backendIsUp, setBackendIsUp] = useState<boolean | null>(null);
+  const [extensionUxElements, setExtensionUxElements] = useState<
+    UiSchemaUxElement[] | null
+  >(null);
+
+  let contentClassName = 'main-site-body-centered';
+  if (window.location.pathname.startsWith('/editor/')) {
+    contentClassName = 'no-center-stuff';
+  }
+  const { targetUris } = useUriListForPermissions();
+  const permissionRequestData: PermissionsToCheck = {
+    [targetUris.extensionListPath]: ['GET'],
+  };
+  const { ability, permissionsLoaded } = usePermissionFetcher(
+    permissionRequestData,
+  );
+
+  const { removeError } = useAPIError();
+
+  const location = useLocation();
+
+  // never carry an error message across to a different path
+  useEffect(() => {
+    removeError();
+    // if we include the removeError function to the dependency array of this useEffect, it causes
+    // an infinite loop where the page with the error adds the error,
+    // then this runs and it removes the error, etc. it is ok not to include it here, i think, because it never changes.
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [location.pathname]);
+
+  // eslint-disable-next-line sonarjs/cognitive-complexity
+  useEffect(() => {
+    const processExtensionResult = (processModels: ProcessModel[]) => {
+      const eni: UiSchemaUxElement[] = processModels
+        .map((processModel: ProcessModel) => {
+          const extensionUiSchemaFile = processModel.files.find(
+            (file: ProcessFile) => file.name === 'extension_uischema.json',
+          );
+          if (extensionUiSchemaFile && extensionUiSchemaFile.file_contents) {
+            try {
+              const extensionUiSchema: ExtensionUiSchema = JSON.parse(
+                extensionUiSchemaFile.file_contents,
+              );
+              if (
+                extensionUiSchema &&
+                extensionUiSchema.ux_elements &&
+                !extensionUiSchema.disabled
+              ) {
+                return extensionUiSchema.ux_elements;
+              }
+            } catch (jsonParseError: any) {
+              console.error(
+                `Unable to get navigation items for ${processModel.id}`,
+              );
+            }
+          }
+          return [] as UiSchemaUxElement[];
+        })
+        .flat();
+      if (eni) {
+        setExtensionUxElements(eni);
+      }
+    };
+
+    const getExtensions = () => {
+      setBackendIsUp(true);
+      if (!permissionsLoaded) {
+        return;
+      }
+      if (ability.can('GET', targetUris.extensionListPath)) {
+        HttpService.makeCallToBackend({
+          path: targetUris.extensionListPath,
+          successCallback: processExtensionResult,
+        });
+      } else {
+        // set to an empty array so we know that it loaded
+        setExtensionUxElements([]);
+      }
+    };
+
+    HttpService.makeCallToBackend({
+      path: targetUris.statusPath,
+      successCallback: getExtensions,
+      failureCallback: () => setBackendIsUp(false),
+    });
+  }, [
+    targetUris.extensionListPath,
+    targetUris.statusPath,
+    permissionsLoaded,
+    ability,
+  ]);
+
+  const routeComponents = () => {
+    return (
+      <Routes>
+        <Route
+          path="*"
+          element={<BaseRoutesMicro extensionUxElements={extensionUxElements} />}
+        />
+        <Route path="editor/*" element={<EditorRoutes />} />
+        <Route path="extensions/:page_identifier" element={<Extension />} />
+        <Route path="login" element={<Login />} />
+      </Routes>
+    );
+  };
+
+  const backendIsDownPage = () => {
+    return [<BackendIsDown />];
+  };
+
+  const innerComponents = () => {
+    if (backendIsUp === null) {
+      return [];
+    }
+    if (backendIsUp) {
+      return routeComponents();
+    }
+    return backendIsDownPage();
+  };
+
+  return (
+    <>
+      {/* TODO : remove this NavigationBar when we have a new navigation system */}
+      <div className="hidden">
+        <NavigationBar extensionUxElements={extensionUxElements} />
+      </div>
+
+      <Content className={contentClassName}>
+        <ScrollToTop />
+        <ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
+          {innerComponents()}
+        </ErrorBoundary>
+      </Content>
+    </>
+  );
+}
diff --git a/spiffworkflow-frontend/src/components/LoginHandler.tsx b/spiffworkflow-frontend/src/components/LoginHandler.tsx
index 8f8232f05..e999ee6ad 100644
--- a/spiffworkflow-frontend/src/components/LoginHandler.tsx
+++ b/spiffworkflow-frontend/src/components/LoginHandler.tsx
@@ -1,12 +1,13 @@
 import { useEffect } from 'react';
 import { useNavigate } from 'react-router-dom';
 import UserService from '../services/UserService';
+import { BASENAME_URL } from '../config';
 
 export default function LoginHandler() {
   const navigate = useNavigate();
   useEffect(() => {
     if (!UserService.isLoggedIn()) {
-      navigate(`/login?original_url=${UserService.getCurrentLocation()}`);
+      navigate(BASENAME_URL + `/login?original_url=${UserService.getCurrentLocation()}`);
     }
   }, [navigate]);
   return null;
diff --git a/spiffworkflow-frontend/src/config.tsx b/spiffworkflow-frontend/src/config.tsx
index 02fca38f1..958057502 100644
--- a/spiffworkflow-frontend/src/config.tsx
+++ b/spiffworkflow-frontend/src/config.tsx
@@ -16,6 +16,8 @@ let spiffEnvironment = '';
 let appRoutingStrategy = 'subdomain_based';
 let backendBaseUrl = null;
 let documentationUrl = null;
+let baseNameUrl = '';
+
 if ('spiffworkflowFrontendJsenv' in window) {
   if ('APP_ROUTING_STRATEGY' in window.spiffworkflowFrontendJsenv) {
     appRoutingStrategy = window.spiffworkflowFrontendJsenv.APP_ROUTING_STRATEGY;
@@ -31,6 +33,10 @@ if ('spiffworkflowFrontendJsenv' in window) {
   }
 }
 
+if (import.meta.env.VITE_BASENAME_URL) {
+  baseNameUrl = import.meta.env.VITE_BASENAME_URL;
+}
+
 if (!backendBaseUrl) {
   let hostAndPortAndPathPrefix;
   if (appRoutingStrategy === 'subdomain_based') {
@@ -46,6 +52,8 @@ if (!backendBaseUrl) {
     if (!Number.isNaN(Number(port))) {
       serverPort = Number(port) - 1;
     }
+    // TODO: Server port overridden here, need to revisit
+    serverPort = 7000;
     hostAndPortAndPathPrefix = `${hostname}:${serverPort}`;
     protocol = 'http';
 
@@ -66,6 +74,7 @@ if (!backendBaseUrl.endsWith('/v1.0')) {
 
 const BACKEND_BASE_URL = backendBaseUrl;
 const DOCUMENTATION_URL = documentationUrl;
+const BASENAME_URL = baseNameUrl;
 
 const PROCESS_STATUSES = [
   'complete',
@@ -134,6 +143,7 @@ export {
   DATE_FORMAT_FOR_DISPLAY,
   DATE_RANGE_DELIMITER,
   BACKEND_BASE_URL,
+  BASENAME_URL,
   DOCUMENTATION_URL,
   PROCESS_STATUSES,
   SPIFF_ENVIRONMENT,
diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css
index 52407ae8b..7686e8aff 100644
--- a/spiffworkflow-frontend/src/index.css
+++ b/spiffworkflow-frontend/src/index.css
@@ -1162,3 +1162,11 @@ div.retrievalExpressionsForm {
   position: relative;
   left: 6px;
 }
+
+/* TODO: Microfrontend css */
+.hidden {
+  display: none;
+}
+.cds--content {
+  margin-top: 50px;
+}
\ No newline at end of file
diff --git a/spiffworkflow-frontend/src/routes/BaseRoutesMicro.tsx b/spiffworkflow-frontend/src/routes/BaseRoutesMicro.tsx
new file mode 100644
index 000000000..0752fe94f
--- /dev/null
+++ b/spiffworkflow-frontend/src/routes/BaseRoutesMicro.tsx
@@ -0,0 +1,89 @@
+import { Route, Routes } from 'react-router-dom';
+import { Loading } from '@carbon/react';
+import Configuration from './Configuration';
+import MessageListPage from './MessageListPage';
+import DataStoreRoutes from './DataStoreRoutes';
+import { UiSchemaUxElement } from '../extension_ui_schema_interfaces';
+import HomeRoutes from './HomeRoutes';
+import ProcessGroupRoutes from './ProcessGroupRoutes';
+import ProcessModelRoutes from './ProcessModelRoutes';
+import ProcessInstanceRoutes from './ProcessInstanceRoutes';
+import ErrorDisplay from '../components/ErrorDisplay';
+import ProcessInstanceShortLink from './ProcessInstanceShortLink';
+import About from './About';
+import Page404 from './Page404';
+import AdminRedirect from './AdminRedirect';
+import RootRoute from './RootRoute';
+import LoginHandler from '../components/LoginHandler';
+import { ExtensionUxElementMap } from '../components/ExtensionUxElementForDisplay';
+import Extension from './Extension';
+import HelloWorld from './HelloWorld';
+
+type OwnProps = {
+  extensionUxElements?: UiSchemaUxElement[] | null;
+};
+
+export default function BaseRoutesMicro({ extensionUxElements }: OwnProps) {
+  const elementCallback = (uxElement: UiSchemaUxElement) => {
+    return (
+      <Route
+        path={uxElement.page}
+        key={uxElement.page}
+        element={<Extension pageIdentifier={uxElement.page} />}
+      />
+    );
+  };
+
+  if (extensionUxElements !== null) {
+    const extensionRoutes = ExtensionUxElementMap({
+      displayLocation: 'routes',
+      elementCallback,
+      extensionUxElements,
+    });
+
+    return (
+      <div className="fixed-width-container">
+        <ErrorDisplay />
+        <LoginHandler />
+        <Routes>
+          {extensionRoutes}
+          <Route path="/" element={<HelloWorld />} />
+          <Route path="/root" element={<RootRoute />} />
+          <Route path="tasks/*" element={<HomeRoutes />} />
+          <Route path="process-groups/*" element={<ProcessGroupRoutes />} />
+          <Route path="process-models/*" element={<ProcessModelRoutes />} />
+          <Route
+            path="process-instances/*"
+            element={<ProcessInstanceRoutes />}
+          />
+          <Route
+            path="i/:process_instance_id"
+            element={<ProcessInstanceShortLink />}
+          />
+          <Route
+            path="configuration/*"
+            element={
+              <Configuration extensionUxElements={extensionUxElements} />
+            }
+          />
+          <Route path="messages" element={<MessageListPage />} />
+          <Route path="data-stores/*" element={<DataStoreRoutes />} />
+          <Route path="about" element={<About />} />
+          <Route path="admin/*" element={<AdminRedirect />} />
+          <Route path="/*" element={<Page404 />} />
+        </Routes>
+      </div>
+    );
+  }
+
+  const style = { margin: '50px 0 50px 50px' };
+  return (
+    <div className="fixed-width-container">
+      <Loading
+        description="Active loading indicator"
+        withOverlay={false}
+        style={style}
+      />
+    </div>
+  );
+}
diff --git a/spiffworkflow-frontend/src/routes/HelloWorld.tsx b/spiffworkflow-frontend/src/routes/HelloWorld.tsx
new file mode 100644
index 000000000..a1b80cc57
--- /dev/null
+++ b/spiffworkflow-frontend/src/routes/HelloWorld.tsx
@@ -0,0 +1,8 @@
+// src/routes/HelloWorld.tsx
+import React from 'react';
+
+const HelloWorld: React.FC = () => {
+  return <div>Hello World</div>;
+};
+
+export default HelloWorld;
diff --git a/spiffworkflow-frontend/src/services/HttpService.ts b/spiffworkflow-frontend/src/services/HttpService.ts
index 7bd096f05..2c2e89ea9 100644
--- a/spiffworkflow-frontend/src/services/HttpService.ts
+++ b/spiffworkflow-frontend/src/services/HttpService.ts
@@ -2,6 +2,7 @@
 import { BACKEND_BASE_URL } from '../config';
 import { objectIsEmpty } from '../helpers';
 import UserService from './UserService';
+import { BASENAME_URL } from '../config';
 
 const HttpMethods = {
   GET: 'GET',
@@ -183,9 +184,9 @@ backendCallProps) => {
         }
       } else if (
         !UserService.isLoggedIn() &&
-        window.location.pathname !== '/login'
+        window.location.pathname !== BASENAME_URL + '/login'
       ) {
-        window.location.href = `/login?original_url=${UserService.getCurrentLocation()}`;
+        window.location.href = BASENAME_URL + `/login?original_url=${UserService.getCurrentLocation()}`;
       }
     });
 };
diff --git a/spiffworkflow-frontend/src/services/UserService.ts b/spiffworkflow-frontend/src/services/UserService.ts
index fd0f7500a..f24895be2 100644
--- a/spiffworkflow-frontend/src/services/UserService.ts
+++ b/spiffworkflow-frontend/src/services/UserService.ts
@@ -45,13 +45,25 @@ const checkPathForTaskShowParams = (
 
 // required for logging out
 const getIdToken = () => {
-  return getCookie('id_token');
+  const id_token = getCookie('id_token');
+  if (id_token) {
+    return id_token;
+  }
+  return getAccessToken();
 };
 const getAccessToken = () => {
-  return getCookie('access_token');
+  const accessToken = getCookie('access_token');
+  if (accessToken) {
+    return accessToken;
+  }
+  return localStorage.getItem('AUTH_TOKEN');
 };
 const getAuthenticationIdentifier = () => {
-  return getCookie('authentication_identifier');
+  const authentication_identifier = getCookie('authentication_identifier');
+  if (authentication_identifier) {
+    return authentication_identifier;
+  }
+  return 'default';
 };
 
 const isLoggedIn = () => {
diff --git a/spiffworkflow-frontend/src/spa.tsx b/spiffworkflow-frontend/src/spa.tsx
new file mode 100644
index 000000000..df621a375
--- /dev/null
+++ b/spiffworkflow-frontend/src/spa.tsx
@@ -0,0 +1,30 @@
+// src/spa.tsx
+
+import React from 'react';
+import ReactDOMClient from 'react-dom/client';
+// @ts-expect-error
+import singleSpaReact from 'single-spa-react';
+import App from './App';
+import AppSpa from './AppSpa';
+import { cssLifecycleFactory } from 'vite-plugin-single-spa/ex';
+
+// TODO: Check if needed. Added for SPA
+import './index.scss';
+import './index.css';
+
+const lc = singleSpaReact({
+    React,
+    ReactDOMClient,
+    rootComponent: AppSpa,
+    errorBoundary(err: any, _info: any, _props: any) {
+        return <div>Error: {String(err)}</div>;
+    }
+});
+
+// IMPORTANT: The argument passed here depends on the file name.
+const cssLc = cssLifecycleFactory('spa');
+
+export const bootstrap = [cssLc.bootstrap, lc.bootstrap];
+export const mount = [cssLc.mount, lc.mount];
+export const unmount = [cssLc.unmount, lc.unmount];
+export const update = [lc.update];
\ No newline at end of file
diff --git a/spiffworkflow-frontend/vite.micro.config.ts b/spiffworkflow-frontend/vite.micro.config.ts
new file mode 100644
index 000000000..a72e69ae8
--- /dev/null
+++ b/spiffworkflow-frontend/vite.micro.config.ts
@@ -0,0 +1,36 @@
+// vite.micro.config.ts for a Vite + React micro-frontend build
+import { defineConfig } from 'vite';
+import react from "@vitejs/plugin-react";
+import vitePluginSingleSpa from "vite-plugin-single-spa";
+import path from 'path';
+
+// https://vitejs.dev/config/
+export default defineConfig({
+  plugins: [
+    react(),
+    vitePluginSingleSpa({
+      serverPort: 4101,
+      spaEntryPoints: "src/spa.tsx",
+    }),
+  ],
+  build: {
+    rollupOptions: {
+      output: {
+        format: 'amd', // Use AMD module format
+      }
+    }
+  },
+  // To load assets from the micro frontend, we need to configure the base path;
+  // it is currently configured to a local http-server path.
+  experimental: {
+    renderBuiltUrl(filename, { hostId, hostType, type }) {
+      if (type === 'public') {
+        return 'http://172.31.80.1:8080/' + filename;
+      } else if (path.extname(hostId) !== '.js') {
+        return 'http://172.31.80.1:8080/' + filename;
+      } else {
+        return 'http://172.31.80.1:8080/' + filename;
+      }
+    }
+  }
+});
\ No newline at end of file

From c0cc9d8cb71ccab3f4b3f518b95fc914dccf0aba Mon Sep 17 00:00:00 2001
From: Ratheesh kumar R <ratheesh.kumar@aot-technologies.com>
Date: Mon, 22 Jul 2024 10:36:56 -0700
Subject: [PATCH 03/10] Updating process model to store BPMN files as binary in
 the database

---
 .../migrations/versions/1801292017d5_.py      |  2 +-
 .../models/process_model.py                   | 27 +++++++++++++++----
 2 files changed, 23 insertions(+), 6 deletions(-)

diff --git a/spiffworkflow-backend/migrations/versions/1801292017d5_.py b/spiffworkflow-backend/migrations/versions/1801292017d5_.py
index 463b0a07f..a7486c359 100644
--- a/spiffworkflow-backend/migrations/versions/1801292017d5_.py
+++ b/spiffworkflow-backend/migrations/versions/1801292017d5_.py
@@ -26,7 +26,7 @@ def upgrade():
     sa.Column('is_executable', sa.Boolean(), nullable=True),
     sa.Column('fault_or_suspend_on_exception', sa.String(), nullable=True),
     sa.Column('process_group', sa.String(), nullable=True),
-    sa.Column('content', sa.Text(), nullable=True),
+    sa.Column('content', sa.LargeBinary(), nullable=True),
     sa.Column('type', sa.String(), nullable=True),
     sa.Column('bpmn_version_control_identifier', sa.String(), nullable=True),
     sa.PrimaryKeyConstraint('process_id')
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py
index 5211975b7..7e925d602 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py
@@ -12,6 +12,8 @@
 
 import enum
 import os
+from datetime import datetime
+
 from dataclasses import dataclass
 from dataclasses import field
 from typing import Any
@@ -21,7 +23,7 @@
 from marshmallow.decorators import post_load
 
 from spiffworkflow_backend.interfaces import ProcessGroupLite
-from spiffworkflow_backend.models.file import File
+from spiffworkflow_backend.models.file import File, CONTENT_TYPES
 
 # we only want to save these items to the json file
 PROCESS_MODEL_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION = [
@@ -54,12 +56,13 @@ class ProcessModelInfo(SpiffworkflowBaseDBModel):
     process_group=db.Column(db.String, default="formsflow")
 
     # files: list[File] | None = field(default_factory=list[File])
-    content = db.Column(db.Text)
+    content = db.Column(db.LargeBinary)
     type = db.Column(db.String, default="bpmn") # BPMN or DMN
 
     # just for the API
     # parent_groups: list[ProcessGroupLite] | None = None
     bpmn_version_control_identifier= db.Column(db.String)
+
 
     @property
     def primary_file_name(self):
@@ -87,7 +90,20 @@ def display_order(self):
 
     @property
     def files(self):
-        return [self.content]
+        file_objects = []
+        for content in [self.content]:
+            if content:
+                file = File(
+                    content_type=CONTENT_TYPES.get(self.type, "application/octet-stream"),
+                    name=(self.display_name or "bpmn_file") + '.bpmn',
+                    type=self.type,
+                    last_modified=datetime.now(),  # Placeholder for actual last modified time
+                    size=len(content),
+                    file_contents=content,
+                    process_model_id=self.id,
+                )
+                file_objects.append(file)
+        return file_objects
 
     @property
     def parent_groups(self):
@@ -122,11 +138,12 @@ def modify_process_identifier_for_path_param(cls, identifier: str) -> str:
 
     def serialized(self) -> dict[str, Any]:
         file_objects = self.files
-        dictionary = self.__dict__
+        dictionary = {k: v for k, v in self.__dict__.items() if k not in ("_sa_instance_state", "content")}
         if file_objects is not None:
             serialized_files = []
             for file in file_objects:
-                serialized_files.append(file.serialized())
+                if file is not None:
+                    serialized_files.append(file.serialized())
             dictionary["files"] = serialized_files
         return dictionary
 

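A minimal, self-contained sketch of the serialization pattern introduced above: a binary content column is exposed as a list of file objects, while the raw bytes (and SQLAlchemy bookkeeping) stay out of the serialized payload. The classes below are throwaway stand-ins, not the real spiffworkflow_backend models.

# Stand-in classes only; the real File/ProcessModelInfo live in spiffworkflow_backend.
from dataclasses import asdict, dataclass
from datetime import datetime


@dataclass
class DemoFile:
    name: str
    content_type: str
    size: int
    last_modified: datetime
    file_contents: bytes


class DemoProcessModel:
    def __init__(self, display_name: str, content: bytes | None) -> None:
        self.id = display_name.lower().replace(" ", "-")
        self.display_name = display_name
        self.content = content  # a LargeBinary column on the real model

    @property
    def files(self) -> list[DemoFile]:
        # Build lightweight file objects from the stored bytes.
        if not self.content:
            return []
        return [
            DemoFile(
                name=f"{self.display_name or 'bpmn_file'}.bpmn",
                content_type="text/xml",
                size=len(self.content),
                last_modified=datetime.now(),
                file_contents=self.content,
            )
        ]

    def serialized(self) -> dict:
        # Keep the raw bytes out of the API payload; a real SQLAlchemy model
        # would also drop _sa_instance_state here.
        dictionary = {k: v for k, v in self.__dict__.items() if k != "content"}
        dictionary["files"] = [
            {k: v for k, v in asdict(f).items() if k != "file_contents"}
            for f in self.files
        ]
        return dictionary


if __name__ == "__main__":
    print(DemoProcessModel("Leave Request", b"<bpmn/>").serialized())
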
From 4d43f6a32c425d27cf096e48f6fd45e8a6a50dad Mon Sep 17 00:00:00 2001
From: Ratheesh kumar R <ratheesh.kumar@aot-technologies.com>
Date: Tue, 23 Jul 2024 12:02:19 -0700
Subject: [PATCH 04/10] Workflow listing with count and search endpoints

---
 .../src/spiffworkflow_backend/__init__.py     |   2 +-
 .../src/spiffworkflow_backend/api.yml         | 113 ++++++++++++++++++
 .../routes/process_models_controller.py       |  69 ++++++++++-
 3 files changed, 182 insertions(+), 2 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
index 05e809f50..6bda8fab6 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
@@ -91,7 +91,7 @@ def create_app() -> flask.app.Flask:
     ]
     CORS(app, origins=origins_re, max_age=3600, supports_credentials=True)
 
-    connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX, pythonic_params=True)
+    connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX, pythonic_params=False)
 
     app.json = MyJSONEncoder(app)
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index c1b8ac405..5131209c3 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -3276,6 +3276,113 @@ paths:
               schema:
                 $ref: "#/components/schemas/ProcessModel"
 
+  /process-definition:
+    parameters:
+      - name: latestVersion
+        in: query
+        required: false
+        description: Return only the latest version of each process definition
+        schema:
+          type: boolean
+      - name: includeProcessDefinitionsWithoutTenantId
+        in: query
+        required: false
+        description: Include process definitions that do not have a tenant ID
+        schema:
+          type: boolean
+      - name: sortBy
+        in: query
+        required: false
+        description: Sort the results by the given field
+        schema:
+          type: string
+      - name: sortOrder
+        in: query
+        required: false
+        description: Sort the results in the specified order (asc or desc)
+        schema:
+          type: string
+      - name: maxResults
+        in: query
+        required: false
+        description: Maximum number of results to return
+        schema:
+          type: integer
+      - name: nameLike
+        in: query
+        required: false
+        description: Filter by name
+        schema:
+          type: string
+    get:
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_definition_list
+      summary: Return a list of process definitions
+      tags:
+        - Process Definitions
+      responses:
+        "200":
+          description: Successfully return the requested process definitions
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/ProcessModel"
+
+  /process-definition/count:
+    parameters:
+      - name: latestVersion
+        in: query
+        required: false
+        description: Return only the latest version of each process definition
+        schema:
+          type: boolean
+      - name: includeProcessDefinitionsWithoutTenantId
+        in: query
+        required: false
+        description: Include process definitions that do not have a tenant ID
+        schema:
+          type: boolean
+      - name: sortBy
+        in: query
+        required: false
+        description: Sort the results by the given field
+        schema:
+          type: string
+      - name: sortOrder
+        in: query
+        required: false
+        description: Sort the results in the specified order (asc or desc)
+        schema:
+          type: string
+      - name: maxResults
+        in: query
+        required: false
+        description: Maximum number of results to return
+        schema:
+          type: integer
+      - name: nameLike
+        in: query
+        required: false
+        description: Filter by name
+        schema:
+          type: string
+    get:
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_definition_list_count
+      summary: Return the count of process definitions
+      tags:
+        - Process Definitions
+      responses:
+        "200":
+          description: Successfully return the requested process definitions count
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/ProcessModelCount"
+
+
   /key/{process_model_identifier}/start:
     parameters:
       - name: process_model_identifier
@@ -3980,3 +4087,9 @@ components:
         api_key:
           type: string
           nullable: false
+    ProcessModelCount:
+      properties:
+        count:
+          type: number
+          example: 1
+          nullable: false
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
index a5c3c108b..69c7cfab1 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
@@ -9,6 +9,7 @@
 
 import connexion  # type: ignore
 import flask.wrappers
+import fnmatch
 from flask import current_app
 from flask import g
 from flask import jsonify
@@ -68,7 +69,7 @@ def process_model_create_formsflow(upload: FileStorage) -> flask.wrappers.Respon
         raise ProcessModelFileInvalidError(f"Received error trying to parse bpmn xml: {str(exception)}") from exception
     if not (process_model_info:= ProcessModelService.find_by_process_id(_key)):
         process_model_info = ProcessModelInfo()  # type: ignore
-    #TODO Check on version management
+    # TODO Check on version management
 
     process_model_info.display_name = _name
     process_model_info.content = _content
@@ -91,6 +92,72 @@ def process_model_create_formsflow(upload: FileStorage) -> flask.wrappers.Respon
     )
 
 
+def process_definition_list(
+    latestVersion: bool | None = False,
+    includeProcessDefinitionsWithoutTenantId: bool | None = False,
+    sortBy: str | None = None,
+    sortOrder: str | None = None,
+    firstResult: int | None = 0,
+    maxResults: int = 100,
+    nameLike: str | None = None,
+    return_count_only: bool | None = False,
+) -> flask.wrappers.Response:
+
+    page = (firstResult // maxResults) + 1
+    per_page = maxResults
+
+    process_models = ProcessModelService.get_process_models_for_api(user=g.user)
+    process_models_to_return = ProcessModelService.get_batch(process_models, page=page, per_page=per_page)
+
+    # Convert to desired format
+    converted_process_models = [
+        {
+            "id": model.id,
+            "key": model.id,  # Assuming 'key' is same as 'id'
+            "tenantId": None,  # TODO: Need to update
+            "name": model.display_name,
+            "description": model.description,
+            "version": 1,  # Assuming version 1
+            "resource": model.files[0].name if model.files else None,
+            "deploymentId": "some_deployment_id",  # TODO: Placeholder, update with actual
+            "suspended": False,  # TODO: Do something based on model.fault_or_suspend_on_exception
+        }
+        for model in process_models_to_return
+    ]
+
+    # Filter by nameLike if provided TODO : Change to filter using db query itself.
+    if nameLike:
+        pattern = nameLike.replace('%', '*').lower()
+        converted_process_models = [
+            model for model in converted_process_models if fnmatch.fnmatch(model["name"].lower(), pattern)
+        ]
+    if return_count_only:
+        return make_response(jsonify({"count": len(converted_process_models)}), 200)
+
+    return make_response(jsonify(converted_process_models), 200)
+
+
+def process_definition_list_count(
+    latestVersion: bool | None = False,
+    includeProcessDefinitionsWithoutTenantId: bool | None = False,
+    sortBy: str | None = None,
+    sortOrder: str | None = None,
+    firstResult: int | None = 0,
+    maxResults: int = 100,
+    nameLike: str | None = None,
+) -> flask.wrappers.Response:
+    return process_definition_list(
+        latestVersion,
+        includeProcessDefinitionsWithoutTenantId,
+        sortBy,
+        sortOrder,
+        firstResult,
+        maxResults,
+        nameLike,
+        True,
+    )
+
+
 def process_model_create(
     modified_process_group_id: str, body: dict[str, str | bool | int | None | list]
 ) -> flask.wrappers.Response:

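process_definition_list converts Camunda-style offset parameters into page-based pagination with page = (firstResult // maxResults) + 1. The sketch below (plain Python, no backend imports) shows that conversion; it assumes firstResult arrives as a multiple of maxResults, and any other offset is rounded down to the start of the containing page.

def to_page(first_result: int, max_results: int) -> tuple[int, int]:
    # Mirrors the conversion used in process_definition_list.
    page = (first_result // max_results) + 1
    per_page = max_results
    return page, per_page


if __name__ == "__main__":
    print(to_page(0, 25))   # (1, 25) -> rows 0-24
    print(to_page(25, 25))  # (2, 25) -> rows 25-49
    print(to_page(30, 25))  # (2, 25) -> offset 30 rounds down to rows 25-49
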
From 82de145620d30a05e02387dc089a12bd4ff00136 Mon Sep 17 00:00:00 2001
From: Ratheesh kumar R <ratheesh.kumar@aot-technologies.com>
Date: Thu, 25 Jul 2024 09:32:07 -0700
Subject: [PATCH 05/10] Fixing pagination and moving search to the database

---
 .../src/spiffworkflow_backend/api.yml          | 12 ++++++++++++
 .../routes/process_models_controller.py        |  8 +-------
 .../services/process_model_service.py          | 18 +++++++++++++-----
 3 files changed, 26 insertions(+), 12 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index 5131209c3..2b50f7832 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -3302,6 +3302,12 @@ paths:
         description: Sort the results in the specified order (asc or desc)
         schema:
           type: string
+      - name: firstResult
+        in: query
+        required: false
+        description: Index of the first result to return
+        schema:
+          type: integer
       - name: maxResults
         in: query
         required: false
@@ -3355,6 +3361,12 @@ paths:
         description: Sort the results in the specified order (asc or desc)
         schema:
           type: string
+      - name: firstResult
+        in: query
+        required: false
+        description: Index of the first result to return
+        schema:
+          type: integer
       - name: maxResults
         in: query
         required: false
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
index 69c7cfab1..1f5c2a694 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
@@ -106,7 +106,7 @@ def process_definition_list(
     page = (firstResult // maxResults) + 1
     per_page = maxResults
 
-    process_models = ProcessModelService.get_process_models_for_api(user=g.user)
+    process_models = ProcessModelService.get_process_models_for_api(user=g.user, filter_by_name=nameLike)
     process_models_to_return = ProcessModelService.get_batch(process_models, page=page, per_page=per_page)
 
     # Convert to desired format
@@ -125,12 +125,6 @@ def process_definition_list(
         for model in process_models_to_return
     ]
 
-    # Filter by nameLike if provided TODO : Change to filter using db query itself.
-    if nameLike:
-        pattern = nameLike.replace('%', '*').lower()
-        converted_process_models = [
-            model for model in converted_process_models if fnmatch.fnmatch(model["name"].lower(), pattern)
-        ]
     if return_count_only:
         return make_response(jsonify({"count": len(converted_process_models)}), 200)
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py
index 976d49425..8b1a85d2f 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py
@@ -189,7 +189,7 @@ def get_process_model(cls, process_model_id: str) -> ProcessModelInfo:
         process_model_id is the full path to the model--including groups.
         """
         # Return process model from database
-        #TODO Change for multi tenancy and versions
+        # TODO Change for multi tenancy and versions
         return ProcessModelInfo.query.filter_by(id=process_model_id).first()
 
         # if not os.path.exists(FileSystemService.root_path()):
@@ -206,8 +206,17 @@ def get_process_models(
             process_group_id: str | None = None,
             recursive: bool | None = False,
             include_files: bool | None = False,
+            filter_by_name: str | None = None
     ) -> list[ProcessModelInfo]:
-        process_models = []
+        # TODO ff : Need refinements here
+        query = ProcessModelInfo.query
+
+        # Applying the filter by name if provided
+        if filter_by_name:
+            query = query.filter(ProcessModelInfo.display_name.like(filter_by_name))
+
+        process_models_infos = query.all()
+
         # root_path = FileSystemService.root_path()
         # if process_group_id:
         #     awesome_id = process_group_id.replace("/", os.sep)
@@ -221,8 +230,6 @@ def get_process_models(
         #     FileSystemService.standard_directory_predicate(recursive),
         #     FileSystemService.is_process_model_json_file,
         # )
-        #TODO ff : Need refinements here
-        process_models_infos = ProcessModelInfo.query.all()
 
         # for process_models_info in process_models_infos:
         #     process_model = cls.get_process_model_from_path(file)
@@ -246,6 +253,7 @@ def get_process_models_for_api(
             filter_runnable_by_user: bool | None = False,
             filter_runnable_as_extension: bool | None = False,
             include_files: bool | None = False,
+            filter_by_name: str | None = None
     ) -> list[ProcessModelInfo]:
         if filter_runnable_as_extension and filter_runnable_by_user:
             raise Exception(
@@ -254,7 +262,7 @@ def get_process_models_for_api(
 
         # get the full list (before we filter it by the ones you are allowed to start)
         process_models = cls.get_process_models(
-            process_group_id=process_group_id, recursive=recursive, include_files=include_files
+            process_group_id=process_group_id, recursive=recursive, include_files=include_files, filter_by_name=filter_by_name
         )
         process_model_identifiers = [p.id for p in process_models]
 

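With this patch the nameLike filter runs in the database via display_name.like(...). A LIKE pattern only matches broadly when it contains % or _ wildcards, so this assumes Camunda-style callers send values such as "%Leave%" rather than bare substrings. Below is a minimal, self-contained sketch of the same filtering against an in-memory SQLite database (throwaway model, not the real ProcessModelInfo).

from sqlalchemy import Column, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class DemoProcessModel(Base):
    __tablename__ = "process_model"
    id = Column(String, primary_key=True)
    display_name = Column(String)


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([
        DemoProcessModel(id="leave-request", display_name="Leave Request"),
        DemoProcessModel(id="expense-claim", display_name="Expense Claim"),
    ])
    session.commit()

    # A wildcard-carrying pattern matches; a bare substring would not.
    name_like = "%Leave%"
    query = session.query(DemoProcessModel)
    if name_like:
        query = query.filter(DemoProcessModel.display_name.like(name_like))
    print([m.display_name for m in query.all()])  # ['Leave Request']
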
From 0f141e18dad4a82e1f4054f8d4e5dcf4cec56ecf Mon Sep 17 00:00:00 2001
From: Sumesh Punakkal Kariyil <sumesh.pk@aot-technologies.com>
Date: Fri, 11 Oct 2024 13:17:29 -0700
Subject: [PATCH 06/10] Changes for new REST endpoints

---
 spiffworkflow-backend/migrations/alembic.ini  |   1 +
 .../migrations/versions/da22d9039670_.py      |  40 ++
 .../src/spiffworkflow_backend/api.yml         | 304 ++++++++++++++
 .../models/human_task_user.py                 |   2 +
 .../models/process_instance.py                |   4 +-
 .../routes/authentication_controller.py       |  18 +-
 .../routes/ff_tasks_controller.py             | 391 ++++++++++++++++++
 .../routes/messages_controller.py             |   2 +
 .../routes/process_api_blueprint.py           |  11 +
 .../routes/process_instances_controller.py    |  35 +-
 .../routes/process_models_controller.py       |   1 +
 .../routes/tasks_controller.py                |   8 +-
 .../scripts/get_token.py                      |  31 ++
 .../services/authorization_service.py         |   7 +
 .../services/message_service.py               |   3 +-
 .../services/process_instance_processor.py    |  48 ++-
 .../services/user_service.py                  |  37 ++
 17 files changed, 916 insertions(+), 27 deletions(-)
 create mode 100644 spiffworkflow-backend/migrations/versions/da22d9039670_.py
 create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
 create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_token.py

diff --git a/spiffworkflow-backend/migrations/alembic.ini b/spiffworkflow-backend/migrations/alembic.ini
index ec9d45c26..2919e80e2 100644
--- a/spiffworkflow-backend/migrations/alembic.ini
+++ b/spiffworkflow-backend/migrations/alembic.ini
@@ -1,6 +1,7 @@
 # A generic, single database configuration.
 
 [alembic]
+script_location = migrations
 # template used to generate migration files
 # file_template = %%(rev)s_%%(slug)s
 
diff --git a/spiffworkflow-backend/migrations/versions/da22d9039670_.py b/spiffworkflow-backend/migrations/versions/da22d9039670_.py
new file mode 100644
index 000000000..3ff2b0a45
--- /dev/null
+++ b/spiffworkflow-backend/migrations/versions/da22d9039670_.py
@@ -0,0 +1,40 @@
+"""empty message
+
+Revision ID: da22d9039670
+Revises: 384e2bbda36b
+Create Date: 2024-09-17 15:13:48.384925
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'da22d9039670'
+down_revision = '384e2bbda36b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('human_task_user', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('ended_at_in_seconds', sa.Integer(), nullable=True))
+        batch_op.add_column(sa.Column('created_at_in_seconds', sa.Integer(), nullable=True))
+
+    # with op.batch_alter_table('task', schema=None) as batch_op:
+    #     batch_op.drop_constraint('guid', type_='unique')
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    # with op.batch_alter_table('task', schema=None) as batch_op:
+    #     batch_op.create_unique_constraint('guid', ['guid'])
+
+    with op.batch_alter_table('human_task_user', schema=None) as batch_op:
+        batch_op.drop_column('created_at_in_seconds')
+        batch_op.drop_column('ended_at_in_seconds')
+
+    # ### end Alembic commands ###
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index 2b50f7832..53617ce7d 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -3422,6 +3422,135 @@ paths:
             application/json:
               schema:
                 $ref: "#/components/schemas/Workflow"
+  /task-filters:
+    post:
+      summary: Retrieve filtered tasks based on pagination and other criteria
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.filter_tasks
+      parameters:
+        - name: firstResult
+          in: query
+          required: true
+          description: The starting index of the results to return
+          schema:
+            type: integer
+        - name: maxResults
+          in: query
+          required: true
+          description: The maximum number of results to return
+          schema:
+            type: integer
+      responses:
+        "200":
+          description: Successfully retrieved filtered tasks
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: object
+                  properties:
+                    _links:
+                      type: object
+                      properties:
+                        self:
+                          type: object
+                          properties:
+                            href:
+                              type: string
+                    _embedded:
+                      type: object
+#                      properties:
+#                        assignee:
+#                          type: array
+#                          items:
+#                            $ref: '#/components/schemas/FfUser'
+#                        processDefinition:
+#                          type: array
+#                          items:
+#                            $ref: '#/components/schemas/ProcessDefinition'
+#                        task:
+#                          type: array
+#                          items:
+#                            $ref: '#/components/schemas/FfTask'
+                    count:
+                      type: integer
+  /task/{task_id}:
+    parameters:
+      - name: task_id
+        in: path
+        required: true
+        description: "The unique id of an existing process group."
+        schema:
+          type: string
+    get:
+      tags:
+        - Tasks
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.get_task_by_id
+      summary: "Gets one task that a user wants to complete"
+      responses:
+        "200":
+          description: "One task"
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Task"
+
+
+  /task/{task_id}/claim:
+    post:
+      summary: "Claim task"
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.claim_task
+      parameters:
+        - name: task_id
+          in: path
+          required: true
+          description: "The unique id of an existing process group."
+          schema:
+            type: string
+      responses:
+        "200":
+          description: "Successfully claimed task"
+          content:
+            application/json:
+              schema:
+                type: object
+  /task/{task_id}/unclaim:
+    post:
+      summary: "Unclaim task"
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.unclaim_task
+      parameters:
+        - name: task_id
+          in: path
+          required: true
+          description: "The unique id of an existing task."
+          schema:
+            type: string
+      responses:
+        "200":
+          description: "Successfully unclaimed task"
+          content:
+            application/json:
+              schema:
+                type: object
+
+  /task/{task_id}/submit-form:
+    post:
+      summary: "Submit form"
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.submit_task
+      parameters:
+        - name: task_id
+          in: path
+          required: true
+          description: "The unique id of an existing task."
+          schema:
+            type: string
+      responses:
+        "200":
+          description: "Successfully submitted task"
+          content:
+            application/json:
+              schema:
+                type: object
 
 components:
   securitySchemes:
@@ -4105,3 +4234,178 @@ components:
           type: number
           example: 1
           nullable: false
+
+#    FfUser:
+#      type: object
+#      properties:
+#        _links:
+#          type: object
+#          properties:
+#            self:
+#              type: object
+#              properties:
+#                href:
+#                  type: string
+#        _embedded:
+#          type: object
+#        id:
+#          type: string
+#        firstName:
+#          type: string
+#        lastName:
+#          type: string
+#        email:
+#          type: string
+#      ProcessDefinition:
+#        type: object
+#        properties:
+#          _links:
+#            type: object
+#            additionalProperties:
+#              type: object
+#              properties:
+#                href:
+#                  type: string
+#          _embedded:
+#            type: object
+#          id:
+#            type: string
+#          key:
+#            type: string
+#          category:
+#            type: string
+#          description:
+#            type: string
+#          name:
+#            type: string
+#          versionTag:
+#            type: string
+#          version:
+#            type: integer
+#          resource:
+#            type: string
+#          deploymentId:
+#            type: string
+#          diagram:
+#            type: string
+#          suspended:
+#            type: boolean
+#          contextPath:
+#            type: string
+#    FfTask:
+#      type: object
+#      properties:
+#        _links:
+#          type: object
+#          additionalProperties:
+#            type: object
+#            properties:
+#              href:
+#                type: string
+#        _embedded:
+#          type: object
+#          properties:
+#            candidateGroups:
+#              type: array
+#              items:
+#                $ref: '#/components/schemas/CandidateGroup'
+#            variable:
+#              type: array
+#              items:
+#                $ref: '#/components/schemas/Variable'
+#        id:
+#          type: string
+#        name:
+#          type: string
+#        assignee:
+#          type: string
+#        created:
+#          type: string
+#          format: date-time
+#        due:
+#          type: string
+#          format: date-time
+#        followUp:
+#          type: string
+#          format: date-time
+#        delegationState:
+#          type: string
+#        description:
+#          type: string
+#        executionId:
+#          type: string
+#        owner:
+#          type: string
+#        parentTaskId:
+#          type: string
+#        priority:
+#          type: integer
+#        processDefinitionId:
+#          type: string
+#        processInstanceId:
+#          type: string
+#        taskDefinitionKey:
+#          type: string
+#        caseExecutionId:
+#          type: string
+#        caseInstanceId:
+#          type: string
+#        caseDefinitionId:
+#          type: string
+#        suspended:
+#          type: boolean
+#        formKey:
+#          type: string
+#        camundaFormRef:
+#          type: string
+#        tenantId:
+#          type: string
+#      CandidateGroup:
+#        type: object
+#        properties:
+#          _links:
+#            type: object
+#            properties:
+#              group:
+#                type: object
+#                properties:
+#                  href:
+#                    type: string
+#              task:
+#                type: object
+#                properties:
+#                  href:
+#                    type: string
+#          _embedded:
+#            type: object
+#          type:
+#            type: string
+#          userId:
+#            type: string
+#          groupId:
+#            type: string
+#          taskId:
+#            type: string
+#      Variable:
+#        type: object
+#        properties:
+#          _links:
+#            type: object
+#            properties:
+#              self:
+#                type: object
+#                properties:
+#                  href:
+#                    type: string
+#          _embedded:
+#            type: object
+#          name:
+#            type: string
+#          value:
+#            type: string
+#          type:
+#            type: string
+#          valueInfo:
+#            type: object
+#            additionalProperties:
+#              type: string
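
For reference, a minimal sketch of exercising the new submit-form endpoint declared above; the backend URL, API prefix, bearer token, and task id are placeholders rather than values taken from this patch.

    import requests

    BASE_URL = "http://localhost:7000/v1.0"               # assumed host and API prefix
    HEADERS = {"Authorization": "Bearer <access-token>"}   # placeholder token
    task_id = "<task-guid>"                                # placeholder task id

    # POST /task/{task_id}/submit-form with the form payload; the spec above
    # declares a 200 response carrying a JSON object.
    resp = requests.post(
        f"{BASE_URL}/task/{task_id}/submit-form",
        json={"field1": "value1"},
        headers=HEADERS,
        timeout=30,
    )
    resp.raise_for_status()
    print(resp.json())
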
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py
index c570c683f..3d759e9fd 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py
@@ -26,5 +26,7 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel):
     id = db.Column(db.Integer, primary_key=True)
     human_task_id = db.Column(ForeignKey(HumanTaskModel.id), nullable=False, index=True)  # type: ignore
     user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
+    ended_at_in_seconds: int = db.Column(db.Integer)
+    created_at_in_seconds: int = db.Column(db.Integer)
 
     human_task = relationship(HumanTaskModel, back_populates="human_task_users")
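
The two new timestamp columns allow an assignment row to be closed instead of deleted: elsewhere in this series an "active" assignee is a HumanTaskUserModel row whose ended_at_in_seconds is still NULL. A minimal sketch of that pattern, assuming a Flask app context and at least one human task in the database:

    import time

    from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
    from spiffworkflow_backend.models.human_task import HumanTaskModel
    from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel

    human_task = HumanTaskModel.query.first()  # any human task, purely for illustration

    # Rows with ended_at_in_seconds=None are the currently active assignments.
    active_rows = HumanTaskUserModel.query.filter_by(
        human_task=human_task, ended_at_in_seconds=None
    ).all()
    for row in active_rows:
        row.ended_at_in_seconds = round(time.time())  # close the assignment

    SpiffworkflowBaseDBModel.commit_with_rollback_on_exception()
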
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py
index 1b633c565..2fb4d3bd0 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py
@@ -19,7 +19,7 @@
 from spiffworkflow_backend.models.future_task import FutureTaskModel
 from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel
-
+from flask import current_app
 
 class ProcessInstanceNotFoundError(Exception):
     pass
@@ -212,7 +212,9 @@ def get_last_completed_task(self) -> TaskModel | None:
     def get_data(self) -> dict:
         """Returns the data of the last completed task in this process instance."""
         last_completed_task = self.get_last_completed_task()
+        current_app.logger.info(f"get_data::last_completed_task : {last_completed_task}")
         if last_completed_task:  # pragma: no cover
+            current_app.logger.info(f"last_completed_task : {last_completed_task.json_data()}")
             return last_completed_task.json_data()
         else:
             return {}
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/authentication_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/authentication_controller.py
index 3461c67b6..4941bc5f4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/authentication_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/authentication_controller.py
@@ -356,6 +356,8 @@ def _get_user_model_from_token(decoded_token: dict) -> UserModel | None:
                                 }
 
                     if user_info is None:
+                        # TODO: consider creating a user from the token here as well.
+
                         AuthenticationService.set_user_has_logged_out()
                         raise ApiError(
                             error_code="invalid_token",
@@ -377,12 +379,14 @@ def _get_user_model_from_token(decoded_token: dict) -> UserModel | None:
                         .first()
                     )
                     if user_model is None:
-                        AuthenticationService.set_user_has_logged_out()
-                        raise ApiError(
-                            error_code="invalid_user",
-                            message="Invalid user. Please log in.",
-                            status_code=401,
-                        )
+                        user_model: UserModel = UserService.create_user_from_token(decoded_token)
+                    # if user_model is None:
+                    #     AuthenticationService.set_user_has_logged_out()
+                    #     raise ApiError(
+                    #         error_code="invalid_user",
+                    #         message="Invalid user. Please log in.",
+                    #         status_code=401,
+                    #     )
                 # no user_info
                 else:
                     AuthenticationService.set_user_has_logged_out()
@@ -400,7 +404,7 @@ def _get_user_model_from_token(decoded_token: dict) -> UserModel | None:
                 message="Invalid token. Please log in.",
                 status_code=401,
             )
-
+    UserService.sync_user_with_token(decoded_token, user_model)
     return user_model
 
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
new file mode 100644
index 000000000..641579177
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
@@ -0,0 +1,391 @@
+import json
+from collections import OrderedDict
+from collections.abc import Generator
+from typing import Any
+from flask import jsonify, make_response
+from datetime import datetime
+
+import flask.wrappers
+import sentry_sdk
+from flask import current_app
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask import stream_with_context
+from flask.wrappers import Response
+from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException  # type: ignore
+from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
+from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
+from SpiffWorkflow.util.task import TaskState  # type: ignore
+from sqlalchemy import and_
+from sqlalchemy import desc
+from sqlalchemy import func
+from sqlalchemy.exc import OperationalError
+from sqlalchemy.orm import aliased
+from sqlalchemy.orm.util import AliasedClass
+
+from spiffworkflow_backend.constants import SPIFFWORKFLOW_BACKEND_SERIALIZER_VERSION
+from spiffworkflow_backend.data_migrations.process_instance_migrator import ProcessInstanceMigrator
+from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.exceptions.error import HumanTaskAlreadyCompletedError
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.json_data import JsonDataModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
+from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.process_instance import ProcessInstanceTaskDataCannotBeUpdatedError
+from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
+from spiffworkflow_backend.models.task import Task
+from spiffworkflow_backend.models.task import TaskModel
+from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
+from spiffworkflow_backend.models.task_draft_data import TaskDraftDataDict
+from spiffworkflow_backend.models.task_draft_data import TaskDraftDataModel
+from spiffworkflow_backend.models.task_instructions_for_end_user import TaskInstructionsForEndUserModel
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.routes.process_api_blueprint import _find_principal_or_raise
+from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_by_id_or_raise
+from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_for_me_or_raise
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.routes.process_api_blueprint import _get_task_model_for_request, _get_task_model_by_guid
+from spiffworkflow_backend.routes.process_api_blueprint import _get_task_model_from_guid_or_raise
+from spiffworkflow_backend.routes.process_api_blueprint import _munge_form_ui_schema_based_on_hidden_fields_in_task_data
+from spiffworkflow_backend.routes.process_api_blueprint import _task_submit_shared
+from spiffworkflow_backend.routes.process_api_blueprint import _update_form_schema_with_task_data_as_needed
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
+from spiffworkflow_backend.services.jinja_service import JinjaService
+from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
+from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
+from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
+from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
+from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
+from spiffworkflow_backend.services.task_service import TaskService
+from .tasks_controller import _get_tasks, task_assign
+import time
+
+
+def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100)  -> flask.wrappers.Response:
+    """Filter tasks and return the list."""
+    if not body or body.get('criteria') is None:
+        return None
+    user_model: UserModel = g.user
+
+    human_tasks_query = (
+        db.session.query(HumanTaskModel, ProcessInstanceModel.id, ProcessModelInfo)
+        .group_by(
+            HumanTaskModel.id,  # Group by the ID of the human task
+            ProcessInstanceModel.id,  # Add the process instance ID to the GROUP BY clause
+            ProcessModelInfo.process_id
+        )  # type: ignore
+        .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
+        .join(ProcessInstanceModel)
+        .join(ProcessModelInfo, ProcessModelInfo.id == ProcessInstanceModel.process_model_identifier)
+        .filter(
+            HumanTaskModel.completed == False,  # noqa: E712
+            ProcessInstanceModel.status != ProcessInstanceStatus.error.value,
+        )
+    )
+
+    # Join through HumanTaskUserModel to associate users to tasks
+    human_tasks_query = human_tasks_query.outerjoin(
+        HumanTaskUserModel,
+        and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id, HumanTaskUserModel.ended_at_in_seconds == None)
+    ).outerjoin(UserModel, UserModel.id == HumanTaskUserModel.user_id)  # Join UserModel using HumanTaskUserModel
+
+    # Check candidateGroupsExpression with value ${currentUserGroups()}
+    if body.get('criteria').get('candidateGroupsExpression') == '${currentUserGroups()}':
+        human_tasks_query = human_tasks_query.filter(
+            GroupModel.identifier.in_([group.identifier for group in user_model.groups]))
+    if candidate_group := body.get('criteria').get('candidateGroup'):
+        human_tasks_query = human_tasks_query.filter(GroupModel.identifier == candidate_group)
+    if body.get('criteria').get('includeAssignedTasks', False):
+        human_tasks_query = human_tasks_query
+    else:
+        human_tasks_query = human_tasks_query.filter(~HumanTaskModel.human_task_users.any())
+
+    if process_def_key := body.get('criteria').get('processDefinitionKey'):
+        human_tasks_query = human_tasks_query.filter(ProcessInstanceModel.process_model_identifier == process_def_key)
+    if ''.join(body.get('criteria').get('assigneeExpression', '').split()) == '${currentUser()}':
+        human_tasks_query = human_tasks_query.filter(UserModel.username == user_model.username)
+
+    # TODO body.get('criteria').get('assignee', '')
+    # TODO body.get('criteria').get('processVariables', '')
+    # TODO body.get('criteria').get('sorting', '')
+
+
+
+    user_username_column = func.max(UserModel.username).label("process_initiator_username")
+    user_displayname_column = func.max(UserModel.display_name).label("process_initiator_firstname")
+    user_email_column = func.max(UserModel.email).label("process_initiator_email")
+    group_identifier_column = func.max(GroupModel.identifier).label("assigned_user_group_identifier")
+
+    human_tasks = (
+        human_tasks_query.add_columns(
+            user_username_column,
+            user_displayname_column,
+            user_email_column,
+            group_identifier_column,
+            HumanTaskModel.task_name,
+            HumanTaskModel.task_title,
+            HumanTaskModel.process_model_display_name,
+            HumanTaskModel.process_instance_id,
+            HumanTaskModel.updated_at_in_seconds,
+            HumanTaskModel.created_at_in_seconds
+        )
+        .order_by(desc(HumanTaskModel.id))  # type: ignore
+        .paginate(page=firstResult, per_page=maxResults, error_out=False)
+    )
+
+    return _format_response(human_tasks)
+
+
+def get_task_by_id(
+    task_id: str
+) -> flask.wrappers.Response:
+    # Query to join HumanTaskModel with HumanTaskUserModel
+    task_query = (
+        db.session.query(HumanTaskModel, HumanTaskUserModel, UserModel)
+        .join(HumanTaskUserModel, and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id, HumanTaskUserModel.ended_at_in_seconds == None))
+        .join(UserModel, HumanTaskUserModel.user_id == UserModel.id)  # Join with UserModel to get user details
+        .filter(HumanTaskModel.task_guid == task_id)
+    )
+
+    tasks = task_query.all()
+
+    # If no tasks are found, return an empty list
+    if not tasks:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with id '{task_id}'",
+            status_code=400,
+        )
+    if len(tasks) > 1:
+        raise ApiError(
+            error_code="more_than_one_task_found",
+            message=f"More than one task found for '{task_id}'",
+            status_code=400,
+        )
+    human_task, human_task_user, user_model = tasks[0]
+    return make_response(jsonify(format_human_task_response(human_task, user_model)), 200)
+
+
+def claim_task(
+    task_id: str,
+body: dict[str, Any],
+) -> flask.wrappers.Response:
+    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
+    if task_model is None:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with id '{task_id}'",
+            status_code=400,
+        )
+
+    task_assign(modified_process_model_identifier=None, process_instance_id=task_model.process_instance_id, task_guid= task_model.task_guid,body={'user_ids': [body.get("userId")]})
+
+    return make_response(jsonify(format_human_task_response(task_model)), 200)
+
+def unclaim_task(
+    task_id: str,
+body: dict[str, Any],
+) -> flask.wrappers.Response:
+    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
+    if task_model is None:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with id '{task_id}'",
+            status_code=400,
+        )
+
+    # formsflow.ai allows only one user per task.
+    human_task_users = HumanTaskUserModel.query.filter_by(ended_at_in_seconds=None, human_task=task_model).all()
+    for human_task_user in human_task_users:
+        human_task_user.ended_at_in_seconds = round(time.time())
+
+    SpiffworkflowBaseDBModel.commit_with_rollback_on_exception()
+
+    return make_response(jsonify({"ok": True}), 200)
+
+
+def get_task_variables( #TODO
+    task_id: int
+) -> flask.wrappers.Response:
+    pass
+
+def get_task_identity_links( #TODO
+    task_id: int
+) -> flask.wrappers.Response:
+    pass
+
+def submit_task(
+    task_id: str,
+body: dict[str, Any],
+) -> flask.wrappers.Response:
+    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
+    if task_model is None:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with id '{task_id}'",
+            status_code=400,
+        )
+    # TODO Manage task variables submitted.
+    with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
+        response_item = _task_submit_shared(task_model.process_instance_id, task_model.task_guid, body)
+        return make_response(jsonify(response_item), 200)
+
+
+
+
+
+def _format_response(human_tasks):
+    response = []
+
+    tasks = []
+    for task in human_tasks.items:
+        task_data = {
+            "_links": {
+                # pass empty _links as spiff doesn't support HATEOAS
+            },
+            "_embedded": {
+                "candidateGroups": [
+                    {
+                        "_links": {
+                            "group": {
+                                "href": f"/group/{task.group_identifier_column}"
+                            },
+                            "task": {
+                                "href": f"/task/{task.HumanTaskModel.id}"
+                            }
+                        },
+                        "_embedded": None,
+                        "type": "candidate",
+                        "userId": None,  # TODO Find User ID
+                        "groupId": task.group_identifier_column,
+                        "taskId": task.HumanTaskModel.id
+                    }
+                ],
+                "variable": []  # TODO Retrieve from the task data
+            },
+            "id": task.HumanTaskModel.task_guid,
+            "name": task.HumanTaskModel.task_name,
+            "assignee": task.user_username_column,
+            "created": datetime.utcfromtimestamp(task.HumanTaskModel.created_at_in_seconds).isoformat() + 'Z',
+            "due": None,  # TODO
+            "followUp": None,  # TODO
+            "delegationState": None,
+            "description": None,
+            "executionId": task.HumanTaskModel.process_instance_id,
+            "owner": None,
+            "parentTaskId": None,
+            "priority": 50, #TODO
+            "processDefinitionId": task.ProcessModelInfo.process_id,
+            "processInstanceId": task.HumanTaskModel.process_instance_id,
+            "taskDefinitionKey": task.HumanTaskModel.task_id,
+            "caseExecutionId": None,
+            "caseInstanceId": None,
+            "caseDefinitionId": None,
+            "suspended": False,
+            "formKey": None,
+            "camundaFormRef": None,
+            "tenantId": None  # TODO
+        }
+
+        tasks.append(task_data)
+
+    assignees = [
+        {
+            "_links": {
+                "self": {
+                    "href": f"/user/{task.user_username_column}"
+                }
+            },
+            "_embedded": None,
+            "id": task.user_username_column,
+            "firstName": task.user_displayname_column,  # Replace with actual data
+            "lastName": "",  # Replace with actual data
+            "email": task.user_email_column  # Replace with actual data
+        }
+        for task in human_tasks.items
+    ]
+
+    process_definitions = [
+        {
+            "_links": {},
+            "_embedded": None,
+            "id": task.ProcessModelInfo.id,
+            "key": task.ProcessModelInfo.process_id,  # Replace with actual data
+            "category": "http://bpmn.io/schema/bpmn",
+            "description": task.ProcessModelInfo.description,
+            "name": task.ProcessModelInfo.display_name,
+            "versionTag": "1",  # TODO Replace with actual version if available
+            "version": 1,  # TODO Replace with actual version if available
+            "resource": f"{task.ProcessModelInfo.display_name}.bpmn",
+            "deploymentId": task.ProcessModelInfo.id,
+            "diagram": None,
+            "suspended": False,
+            "contextPath": None
+        }
+        for task in human_tasks.items
+    ]
+
+    response.append({
+        "_links": {},
+        "_embedded": {
+            "assignee": assignees,
+            "processDefinition": process_definitions,
+            "task": tasks
+        },
+        "count": human_tasks.total
+    })
+
+    response.append({  # TODO Add additional information
+        "variables": [
+            {
+                "name": "formName",
+                "label": "Form Name"
+            },
+            {
+                "name": "applicationId",
+                "label": "Submission Id"
+            }
+        ],
+        "taskVisibleAttributes": {
+            "applicationId": True,
+            "assignee": True,
+            "taskTitle": True,
+            "createdDate": True,
+            "dueDate": True,
+            "followUp": True,
+            "priority": True,
+            "groups": True
+        }
+    })
+
+    return make_response(jsonify(response), 200)
+
+
+def format_human_task_response(human_task: HumanTaskModel, user_model: UserModel) -> dict:
+    """
+    Format the human_task into the required response structure.
+    """
+    return {
+        "id": human_task.task_guid,
+        "name": human_task.task_title or human_task.task_name,
+        "assignee": user_model.username,
+        "created": datetime.utcfromtimestamp(human_task.created_at_in_seconds).isoformat() + "Z" if human_task.created_at_in_seconds else None,
+        "due": None,  #TODO
+        "followUp": None,  #TODO
+        "description": human_task.task_name,  # Assuming task_name serves as the description
+        "parentTaskId": None,  # No clear parent task id field in the model
+        "priority": 50,  # Default to 50 since there's no priority field in the model
+        "processDefinitionId": human_task.bpmn_process_identifier,  # Mapping to bpmn_process_identifier
+        "processInstanceId": human_task.process_instance_id,
+        "taskDefinitionKey": human_task.task_id,  # Mapping taskDefinitionKey to task_id
+        "tenantId": None  # TODO
+    }
+
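
For clarity, an illustrative request body for filter_tasks; the keys are the criteria fields this controller reads and every value is made up.

    body = {
        "criteria": {
            "candidateGroupsExpression": "${currentUserGroups()}",
            "candidateGroup": "formsflow-reviewer",
            "includeAssignedTasks": True,
            "processDefinitionKey": "my-process-model",
            "assigneeExpression": "${currentUser()}",
        }
    }
    # filter_tasks(body, firstResult=1, maxResults=100) paginates the query and
    # returns the Camunda-style structure assembled by _format_response.
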
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py
index 6bbbc7077..65902f0a5 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py
@@ -98,6 +98,8 @@ def message_send(
 ) -> flask.wrappers.Response:
     receiver_message = MessageService.run_process_model_from_message(modified_message_name, body, execution_mode)
     process_instance = ProcessInstanceModel.query.filter_by(id=receiver_message.process_instance_id).first()
+
+
     response_json = {
         "task_data": process_instance.get_data(),
         "process_instance": ProcessInstanceModelSchema().dump(process_instance),
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
index fecfdf7a0..38f6e52b4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -836,3 +836,14 @@ def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(form_ui_schema: di
                 relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part]
                 if len(hidden_field_parts) == ii + 1:
                     relevant_depth_of_ui_schema["ui:widget"] = "hidden"
+
+
+def _get_task_model_by_guid(task_guid: str, process_instance_id: int) -> TaskModel:
+    task_model: TaskModel | None = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first()
+    if task_model is None:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'",
+            status_code=400,
+        )
+    return task_model
\ No newline at end of file
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 8b0958479..50ea38987 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -1,3 +1,5 @@
+import uuid
+
 from spiffworkflow_backend.helpers.spiff_enum import ProcessInstanceExecutionMode
 
 # black and ruff are in competition with each other in import formatting so ignore ruff
@@ -5,6 +7,12 @@
 
 import json
 from typing import Any
+import time
+
+import copy
+import json
+import uuid
+from hashlib import sha256
 
 import flask.wrappers
 from flask import current_app
@@ -72,7 +80,32 @@ def process_instance_start(
     current_app.logger.info(f"Instance created succesfully : {process_instance.id}")
 
     current_app.logger.info("running the instance")
-    return process_instance_run(process_model_identifier, process_instance.id, force_run, execution_mode)
+    process_instance_response = process_instance_run(process_model_identifier, process_instance.id, force_run, execution_mode)
+
+    # Create a dummy task to hold the process instance data
+    blank_json = json.dumps({})
+    blank_json_data_hash = sha256(blank_json.encode("utf8")).hexdigest()
+    json_data_hash = sha256(json.dumps(body).encode("utf8")).hexdigest()
+    # Find the task definition for the start event and use it
+    print("process_instance.bpmn_process_definition_id ", process_instance.bpmn_process_definition_id)
+    task_def_model: TaskDefinitionModel = TaskDefinitionModel.query.filter_by(typename='StartEvent',
+                                                                              bpmn_process_definition_id=process_instance.bpmn_process_definition_id).first()
+
+    TaskModel(
+        guid=uuid.uuid4(),
+        bpmn_process_id=process_instance.bpmn_process_id,
+        process_instance_id=process_instance.id,
+        task_definition_id=task_def_model.id,
+        state='COMPLETED',
+        properties_json={},
+        start_in_seconds=time.time(),
+        end_in_seconds=time.time(),
+        json_data_hash=json_data_hash,
+        python_env_data_hash=blank_json_data_hash,
+        data=body
+    )
+
+    return process_instance_response
 
 
 def process_instance_create(
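
The json_data_hash values above come from hashing the canonical JSON serialization of the payload; a small self-contained illustration of that recipe:

    import json
    from hashlib import sha256

    body = {"field1": "value1"}  # placeholder start payload

    blank_json_data_hash = sha256(json.dumps({}).encode("utf8")).hexdigest()
    json_data_hash = sha256(json.dumps(body).encode("utf8")).hexdigest()
    print(blank_json_data_hash, json_data_hash)
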
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
index 1f5c2a694..d6f40c4fa 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py
@@ -47,6 +47,7 @@
 from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator  # type: ignore
 from spiffworkflow_backend.services.custom_parser import MyCustomParser
 from lxml import etree  # type: ignore
+from spiffworkflow_backend.routes.process_instances_controller import process_instance_create
 
 
 def process_model_create_formsflow(upload: FileStorage) -> flask.wrappers.Response:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
index 2a7d2731f..dd74c81da 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -62,7 +62,7 @@
 from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
 from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
 from spiffworkflow_backend.services.task_service import TaskService
-
+import time
 
 def task_allows_guest(
     process_instance_id: int,
@@ -377,11 +377,15 @@ def task_assign(
         )
 
     human_task = human_tasks[0]
+    # formsflow.ai allows only one user per task.
+    human_task_users = HumanTaskUserModel.query.filter_by(ended_at_in_seconds=None, human_task=human_task).all()
+    for human_task_user in human_task_users:
+        human_task_user.ended_at_in_seconds = round(time.time())
 
     for user_id in body["user_ids"]:
         human_task_user = HumanTaskUserModel.query.filter_by(user_id=user_id, human_task=human_task).first()
         if human_task_user is None:
-            human_task_user = HumanTaskUserModel(user_id=user_id, human_task=human_task)
+            human_task_user = HumanTaskUserModel(user_id=user_id, human_task=human_task, created_at_in_seconds=round(time.time()))
             db.session.add(human_task_user)
 
     SpiffworkflowBaseDBModel.commit_with_rollback_on_exception()
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_token.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_token.py
new file mode 100644
index 000000000..a5d8d96cf
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_token.py
@@ -0,0 +1,31 @@
+import requests
+from flask import current_app
+
+
+def create_token() -> str:
+    """Create a Keycloak service token and return it."""
+    # Get Keycloak configuration from Flask app config
+    url: str = current_app.config.get("SPIFFWORKFLOW_BACKEND_AUTH_CONFIGS").get("uri")
+    client: str = current_app.config.get("SPIFFWORKFLOW_BACKEND_AUTH_CONFIGS").get("client_id")
+    secret: str = current_app.config.get("SPIFFWORKFLOW_BACKEND_AUTH_CONFIGS").get("client_secret")
+
+    # Prepare the token request payload
+    token_url = f"{url}/protocol/openid-connect/token"
+    data = {
+        'grant_type': 'client_credentials',
+        'client_id': client,
+        'client_secret': secret
+    }
+
+    # Make the request to Keycloak to get the token
+    try:
+        response = requests.post(token_url, data=data)
+        response.raise_for_status()  # Raise an error for bad status codes
+
+        # Parse the access token from the response
+        token_data = response.json()
+        return token_data.get('access_token')
+
+    except requests.exceptions.RequestException as e:
+        current_app.logger.error(f"Failed to retrieve token: {e}")
+        raise
\ No newline at end of file
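
A sketch of how this service token might be consumed; create_token needs an app context with SPIFFWORKFLOW_BACKEND_AUTH_CONFIGS configured, and the target URL below is a placeholder.

    import requests

    from spiffworkflow_backend.scripts.get_token import create_token

    token = create_token()
    resp = requests.get(
        "http://localhost:7000/v1.0/status",           # placeholder endpoint
        headers={"Authorization": f"Bearer {token}"},  # attach the service token
        timeout=30,
    )
    resp.raise_for_status()
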
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
index ce755790d..3b11a1465 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
@@ -94,7 +94,9 @@ class AuthorizationService:
     @classmethod
     def has_permission(cls, principals: list[PrincipalModel], permission: str, target_uri: str) -> bool:
         principal_ids = [p.id for p in principals]
+        print("principal_ids ", principal_ids)
         target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)
+        print("target_uri_normalized ", target_uri_normalized)
 
         permission_assignments = (
             PermissionAssignmentModel.query.filter(PermissionAssignmentModel.principal_id.in_(principal_ids))
@@ -112,6 +114,7 @@ def has_permission(cls, principals: list[PrincipalModel], permission: str, targe
             )
             .all()
         )
+        print("permission_assignments ", permission_assignments)
 
         if len(permission_assignments) == 0:
             return False
@@ -130,6 +133,7 @@ def has_permission(cls, principals: list[PrincipalModel], permission: str, targe
     @classmethod
     def user_has_permission(cls, user: UserModel, permission: str, target_uri: str) -> bool:
         principals = UserService.all_principals_for_user(user)
+        print("principals -->", principals)
         return cls.has_permission(principals, permission, target_uri)
 
     @classmethod
@@ -349,7 +353,10 @@ def get_permission_from_http_method(cls, http_method: str) -> str | None:
     @classmethod
     def check_permission_for_request(cls) -> None:
         permission_string = cls.get_permission_from_http_method(request.method)
+        print("permission_string ", permission_string)
         if permission_string:
+            print("g.user ", g.user)
+            print("request.path ", request.path)
             has_permission = cls.user_has_permission(
                 user=g.user,
                 permission=permission_string,
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py
index 0ca8ef8d7..c4828c468 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py
@@ -27,7 +27,7 @@
 from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
 from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
 from spiffworkflow_backend.services.user_service import UserService
-
+from spiffworkflow_backend.scripts.get_token import create_token
 
 class MessageServiceError(Exception):
     pass
@@ -108,6 +108,7 @@ def correlate_send_message(
                     message_instance_send.counterpart_id = message_instance_receive.id
                     db.session.add(message_instance_send)
                     db.session.commit()
+
                 if should_queue_process_instance(receiving_process_instance, execution_mode=execution_mode):
                     queue_process_instance_if_appropriate(receiving_process_instance, execution_mode=execution_mode)
                 return message_instance_receive
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index caa62ac24..37d6cf983 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -948,36 +948,53 @@ def get_potential_owner_ids_from_task(self, task: SpiffTask) -> PotentialOwnerId
         elif re.match(r"(process.?)initiator", task_lane, re.IGNORECASE):
             potential_owner_ids = [self.process_instance_model.process_initiator_id]
         else:
-            group_model = GroupModel.query.filter_by(identifier=task_lane).first()
+            group_model = self._find_or_create_group(task_lane)
             if group_model is not None:
                 lane_assignment_id = group_model.id
-            if "lane_owners" in task.data and task_lane in task.data["lane_owners"]:
+
+            if "candidate_group" in task.data: # Add capability to add group in script task.
+                group_model = self._find_or_create_group(task.data["candidate_group"])
+                if group_model is not None:
+                    lane_assignment_id = group_model.id
+
+            elif "lane_owners" in task.data and task_lane in task.data["lane_owners"]:
                 for username in task.data["lane_owners"][task_lane]:
                     lane_owner_user = UserModel.query.filter_by(username=username).first()
                     if lane_owner_user is not None:
                         potential_owner_ids.append(lane_owner_user.id)
-                self.raise_if_no_potential_owners(
-                    potential_owner_ids,
-                    (
-                        "No users found in task data lane owner list for lane:"
-                        f" {task_lane}. The user list used:"
-                        f" {task.data['lane_owners'][task_lane]}"
-                    ),
-                )
+                #TODO in formsflow tasks can come first and users or groups created later
+                # self.raise_if_no_potential_owners(
+                #     potential_owner_ids,
+                #     (
+                #         "No users found in task data lane owner list for lane:"
+                #         f" {task_lane}. The user list used:"
+                #         f" {task.data['lane_owners'][task_lane]}"
+                #     ),
+                # )
             else:
                 if group_model is None:
                     raise (NoPotentialOwnersForTaskError(f"Could not find a group with name matching lane: {task_lane}"))
                 potential_owner_ids = [i.user_id for i in group_model.user_group_assignments]
-                self.raise_if_no_potential_owners(
-                    potential_owner_ids,
-                    f"Could not find any users in group to assign to lane: {task_lane}",
-                )
+                # TODO in formsflow tasks can come first and users or groups created later
+                # self.raise_if_no_potential_owners(
+                #     potential_owner_ids,
+                #     f"Could not find any users in group to assign to lane: {task_lane}",
+                # )
 
         return {
             "potential_owner_ids": potential_owner_ids,
             "lane_assignment_id": lane_assignment_id,
         }
 
+    def _find_or_create_group(self, task_lane):
+        group_model = GroupModel.query.filter_by(identifier=task_lane).first()
+        if group_model is None:
+            group_model = GroupModel(name=task_lane, identifier=task_lane)
+            db.session.add(group_model)
+            db.session.commit()
+            db.session.refresh(group_model)
+        return group_model
+
     def extract_metadata(self) -> None:
         # we are currently not getting the metadata extraction paths based on the version in git from the process instance.
         # it would make sense to do that if the shell-out-to-git performance cost was not too high.
@@ -1043,6 +1060,7 @@ def _store_bpmn_process_definition(
         store_bpmn_definition_mappings: bool = False,
         full_bpmn_spec_dict: dict | None = None,
     ) -> BpmnProcessDefinitionModel:
+        # CHECK HERE
         process_bpmn_identifier = process_bpmn_properties["name"]
         process_bpmn_name = process_bpmn_properties["description"]
 
@@ -1440,7 +1458,7 @@ def get_spec(
         # Add only the main file for now, for POC.
 
         # for file in files:
-        data = process_model_info.content.tobytes()
+        data = process_model_info.content#.tobytes()
         try:
             if process_model_info.type == FileType.bpmn.value:
                 bpmn: etree.Element = SpecFileService.get_etree_from_xml_bytes(data)
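
With the candidate_group hook added above, a script task that runs before a user task can route that user task to a group by name, and _find_or_create_group will create the group on the fly if it does not exist yet. A minimal script-task body (the group name is illustrative):

    # Script task body: setting this workflow variable makes the next user task
    # resolve its lane assignment to the "clerks" group.
    candidate_group = "clerks"
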
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
index 2d82d6afd..169c1d101 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
@@ -130,6 +130,44 @@ def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None:
             db.session.add(ugam)
             db.session.commit()
 
+    @classmethod
+    def create_user_from_token(cls, token_info: dict) -> UserModel:
+        """Create the user, group and principal from the token."""
+        user_model: UserModel = cls.create_user(
+            username=token_info.get('preferred_username'),
+            service=token_info.get('iss'),
+            service_id=token_info.get('sub'),
+            email=token_info.get('email'),
+            display_name=token_info.get('name')
+        )
+        cls.sync_user_with_token(token_info, user_model)
+        return user_model
+
+    @classmethod
+    def sync_user_with_token(cls, token_info, user_model):
+        if not token_info or not user_model:
+            return
+        # Create group if it doesn't exist
+        token_groups = token_info.get('groups') or token_info.get('roles') or []
+        for token_group in token_groups:
+            token_group = token_group.lstrip("/")
+            group: GroupModel = GroupModel.query.filter_by(identifier=token_group).one_or_none()
+            if not group:
+                group = GroupModel(identifier=token_group)
+                db.session.add(group)
+            # Create user group assignment for this user.
+            uga: UserGroupAssignmentModel = UserGroupAssignmentModel.query.filter_by(user_id=user_model.id).filter_by(
+                group_id=group.id).one_or_none()
+            if not uga:
+                uga = UserGroupAssignmentModel(user_id=user_model.id, group_id=group.id)
+                db.session.add(uga)
+            # Create principal for this group
+            principal: PrincipalModel = PrincipalModel.query.filter_by(group_id=group.id).one_or_none()
+            if not principal:
+                principal = PrincipalModel(group_id=group.id)
+                db.session.add(principal)
+        db.session.commit()
+
     @classmethod
     def add_waiting_group_assignment(
             cls, username: str, group: GroupModel
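
For reference, the claims read by create_user_from_token and sync_user_with_token, shown with illustrative values; group identifiers keep their Keycloak form and the leading "/" is stripped before lookup.

    decoded_token = {
        "preferred_username": "jdoe",                        # becomes UserModel.username
        "iss": "https://keycloak.example/realms/formsflow",  # recorded as the auth service
        "sub": "user-sub-1234",                              # recorded as the service id
        "email": "jdoe@example.com",
        "name": "Jane Doe",                                  # becomes the display name
        "groups": ["/formsflow-reviewer", "camunda-admin"],  # "roles" is used when "groups" is absent
    }
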

From d1eadbcd5814f3248ae079b05b34ce1fb55dc9e9 Mon Sep 17 00:00:00 2001
From: Sumesh Punakkal Kariyil <sumesh.pk@aot-technologies.com>
Date: Fri, 18 Oct 2024 16:54:55 -0700
Subject: [PATCH 07/10] Update ff_tasks_controller.py

---
 .../routes/ff_tasks_controller.py             | 297 +++++++++---------
 1 file changed, 143 insertions(+), 154 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
index 641579177..4501754bc 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
@@ -1,90 +1,70 @@
-import json
-from collections import OrderedDict
-from collections.abc import Generator
 from typing import Any
-from flask import jsonify, make_response
+import time
 from datetime import datetime
+from typing import Any
+from sqlalchemy import and_, asc, desc, cast
+from sqlalchemy.types import String
 
 import flask.wrappers
 import sentry_sdk
-from flask import current_app
-from flask import g
-from flask import jsonify
-from flask import make_response
-from flask import stream_with_context
-from flask.wrappers import Response
 from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.util.task import TaskState  # type: ignore
-from sqlalchemy import and_
-from sqlalchemy import desc
-from sqlalchemy import func
-from sqlalchemy.exc import OperationalError
-from sqlalchemy.orm import aliased
-from sqlalchemy.orm.util import AliasedClass
-
-from spiffworkflow_backend.constants import SPIFFWORKFLOW_BACKEND_SERIALIZER_VERSION
-from spiffworkflow_backend.data_migrations.process_instance_migrator import ProcessInstanceMigrator
+from flask import current_app
+from flask import g
+from flask import jsonify
+from flask import make_response
 from spiffworkflow_backend.exceptions.api_error import ApiError
-from spiffworkflow_backend.exceptions.error import HumanTaskAlreadyCompletedError
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.group import GroupModel
 from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.task import TaskModel
 from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
-from spiffworkflow_backend.models.process_model import ProcessModelInfo
-from spiffworkflow_backend.models.json_data import JsonDataModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
-from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
-from spiffworkflow_backend.models.process_instance import ProcessInstanceTaskDataCannotBeUpdatedError
-from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
-from spiffworkflow_backend.models.task import Task
-from spiffworkflow_backend.models.task import TaskModel
-from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
-from spiffworkflow_backend.models.task_draft_data import TaskDraftDataDict
-from spiffworkflow_backend.models.task_draft_data import TaskDraftDataModel
-from spiffworkflow_backend.models.task_instructions_for_end_user import TaskInstructionsForEndUserModel
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
 from spiffworkflow_backend.models.user import UserModel
-from spiffworkflow_backend.routes.process_api_blueprint import _find_principal_or_raise
-from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_by_id_or_raise
-from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_for_me_or_raise
-from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
-from spiffworkflow_backend.routes.process_api_blueprint import _get_task_model_for_request, _get_task_model_by_guid
-from spiffworkflow_backend.routes.process_api_blueprint import _get_task_model_from_guid_or_raise
-from spiffworkflow_backend.routes.process_api_blueprint import _munge_form_ui_schema_based_on_hidden_fields_in_task_data
+from spiffworkflow_backend.models.json_data import JsonDataModel
 from spiffworkflow_backend.routes.process_api_blueprint import _task_submit_shared
-from spiffworkflow_backend.routes.process_api_blueprint import _update_form_schema_with_task_data_as_needed
-from spiffworkflow_backend.services.authorization_service import AuthorizationService
-from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
-from spiffworkflow_backend.services.jinja_service import JinjaService
-from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
-from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
-from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
-from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
-from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
-from spiffworkflow_backend.services.task_service import TaskService
-from .tasks_controller import _get_tasks, task_assign
-import time
+from sqlalchemy import and_
+from sqlalchemy import desc
+from sqlalchemy import func
+
+from .tasks_controller import task_assign
 
 
-def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100)  -> flask.wrappers.Response:
+def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100) -> flask.wrappers.Response:
     """Filter tasks and return the list."""
     if not body or body.get('criteria') is None:
         return None
     user_model: UserModel = g.user
 
     human_tasks_query = (
-        db.session.query(HumanTaskModel, ProcessInstanceModel.id, ProcessModelInfo)
+        db.session.query(
+            HumanTaskModel, ProcessInstanceModel.id, ProcessModelInfo,
+            func.max(UserModel.username).label("process_initiator_username"),
+            func.max(UserModel.display_name).label("process_initiator_firstname"),
+            func.max(UserModel.email).label("process_initiator_email"),
+            func.max(GroupModel.identifier).label("assigned_user_group_identifier")
+        ).distinct(HumanTaskModel.id)
         .group_by(
             HumanTaskModel.id,  # Group by the ID of the human task
             ProcessInstanceModel.id,  # Add the process instance ID to the GROUP BY clause
-            ProcessModelInfo.process_id
+            ProcessModelInfo.process_id,
+            # GroupModel.identifier
         )  # type: ignore
         .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
         .join(ProcessInstanceModel)
         .join(ProcessModelInfo, ProcessModelInfo.id == ProcessInstanceModel.process_model_identifier)
+        .outerjoin(HumanTaskUserModel, and_(
+            HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+            HumanTaskUserModel.ended_at_in_seconds == None
+        ))
+        .outerjoin(UserModel, UserModel.id == HumanTaskUserModel.user_id)
+        .outerjoin(TaskModel, TaskModel.guid == HumanTaskModel.task_id)
+        .outerjoin(JsonDataModel, JsonDataModel.hash == TaskModel.json_data_hash)
         .filter(
             HumanTaskModel.completed == False,  # noqa: E712
             ProcessInstanceModel.status != ProcessInstanceStatus.error.value,
@@ -92,65 +72,69 @@ def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100)  -> fl
     )
 
     # Join through HumanTaskUserModel to associate users to tasks
-    human_tasks_query = human_tasks_query.outerjoin(
-        HumanTaskUserModel,
-        and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id, HumanTaskUserModel.ended_at_in_seconds == None)
-    ).outerjoin(UserModel, UserModel.id == HumanTaskUserModel.user_id)  # Join UserModel using HumanTaskUserModel
-
-    # Check candidateGroupsExpression with value ${currentUserGroups()}
     if body.get('criteria').get('candidateGroupsExpression') == '${currentUserGroups()}':
         human_tasks_query = human_tasks_query.filter(
             GroupModel.identifier.in_([group.identifier for group in user_model.groups]))
     if candidate_group := body.get('criteria').get('candidateGroup'):
         human_tasks_query = human_tasks_query.filter(GroupModel.identifier == candidate_group)
-    if body.get('criteria').get('includeAssignedTasks', False):
-        human_tasks_query = human_tasks_query
-    else:
+    if not body.get('criteria').get('includeAssignedTasks', False):
         human_tasks_query = human_tasks_query.filter(~HumanTaskModel.human_task_users.any())
 
     if process_def_key := body.get('criteria').get('processDefinitionKey'):
         human_tasks_query = human_tasks_query.filter(ProcessInstanceModel.process_model_identifier == process_def_key)
     if ''.join(body.get('criteria').get('assigneeExpression', '').split()) == '${currentUser()}':
         human_tasks_query = human_tasks_query.filter(UserModel.username == user_model.username)
+    if assignee := body.get('criteria').get('assignee'):
+        human_tasks_query = human_tasks_query.filter(UserModel.username == assignee)
+    if assignee := body.get('criteria').get('assignee'):
+        human_tasks_query = human_tasks_query.filter(UserModel.username == assignee)
+
+    #  Filtering by process variables
+    process_variables = body.get('criteria', {}).get('processVariables', [])
+    if process_variables:
+        for variable in process_variables:
+            var_name = variable.get('name')
+            var_value = variable.get('value')
+            json_field = JsonDataModel.data['data'].op('->>')(var_name)
+            human_tasks_query = human_tasks_query.filter(cast(json_field, String) == var_value)
+
+    # Sorting logic
+    sorting_criteria = body.get('criteria', {}).get('sorting', [])
+    if sorting_criteria:
+        for sort_item in sorting_criteria:
+            sort_by = sort_item.get('sortBy')
+            sort_order = sort_item.get('sortOrder', 'asc')  # Default to ascending if not provided
+
+            if sort_by == 'created':
+                human_tasks_query = human_tasks_query.order_by(
+                    asc(HumanTaskModel.id),
+                    asc(HumanTaskModel.created_at_in_seconds) if sort_order == 'asc' else desc(HumanTaskModel.created_at_in_seconds)
+                )
+            elif sort_by == 'title':
+                human_tasks_query = human_tasks_query.order_by(
+                    asc(HumanTaskModel.id),
+                    asc(HumanTaskModel.task_title) if sort_order == 'asc' else desc(HumanTaskModel.task_title)
+                )
 
-    # TODO body.get('criteria').get('assignee', '')
-    # TODO body.get('criteria').get('processVariables', '')
-    # TODO body.get('criteria').get('sorting', '')
-
-
-
-    user_username_column = func.max(UserModel.username).label("process_initiator_username")
-    user_displayname_column = func.max(UserModel.display_name).label("process_initiator_firstname")
-    user_email_column = func.max(UserModel.email).label("process_initiator_email")
-    group_identifier_column = func.max(GroupModel.identifier).label("assigned_user_group_identifier")
-
-    human_tasks = (
-        human_tasks_query.add_columns(
-            user_username_column,
-            user_displayname_column,
-            user_email_column,
-            group_identifier_column,
-            HumanTaskModel.task_name,
-            HumanTaskModel.task_title,
-            HumanTaskModel.process_model_display_name,
-            HumanTaskModel.process_instance_id,
-            HumanTaskModel.updated_at_in_seconds,
-            HumanTaskModel.created_at_in_seconds
-        )
-        .order_by(desc(HumanTaskModel.id))  # type: ignore
-        .paginate(page=firstResult, per_page=maxResults, error_out=False)
-    )
+    else:
+        human_tasks_query = human_tasks_query.order_by(desc(HumanTaskModel.id))  # Order by task ID
+
+    current_app.logger.info("human_tasks_query --->")
+    current_app.logger.info(human_tasks_query)
+
+    human_tasks = human_tasks_query.paginate(page=firstResult, per_page=maxResults, error_out=False)
 
     return _format_response(human_tasks)
 
 
 def get_task_by_id(
-    task_id: str
+        task_id: str
 ) -> flask.wrappers.Response:
     # Query to join HumanTaskModel with HumanTaskUserModel
     task_query = (
         db.session.query(HumanTaskModel, HumanTaskUserModel, UserModel)
-        .join(HumanTaskUserModel, and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id, HumanTaskUserModel.ended_at_in_seconds == None))
+        .join(HumanTaskUserModel, and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+                                       HumanTaskUserModel.ended_at_in_seconds == None))
         .join(UserModel, HumanTaskUserModel.user_id == UserModel.id)  # Join with UserModel to get user details
         .filter(HumanTaskModel.task_guid == task_id)
     )
@@ -175,8 +159,8 @@ def get_task_by_id(
 
 
 def claim_task(
-    task_id: str,
-body: dict[str, Any],
+        task_id: str,
+        body: dict[str, Any],
 ) -> flask.wrappers.Response:
     task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
     if task_model is None:
@@ -186,13 +170,15 @@ def claim_task(
             status_code=400,
         )
 
-    task_assign(modified_process_model_identifier=None, process_instance_id=task_model.process_instance_id, task_guid= task_model.task_guid,body={'user_ids': [body.get("userId")]})
+    task_assign(modified_process_model_identifier=None, process_instance_id=task_model.process_instance_id,
+                task_guid=task_model.task_guid, body={'user_ids': [body.get("userId")]})
 
     return make_response(jsonify(format_human_task_response(task_model)), 200)
 
+
 def unclaim_task(
-    task_id: str,
-body: dict[str, Any],
+        task_id: str,
+        body: dict[str, Any],
 ) -> flask.wrappers.Response:
     task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
     if task_model is None:
@@ -212,19 +198,21 @@ def unclaim_task(
     return make_response(jsonify({"ok": True}), 200)
 
 
-def get_task_variables( #TODO
-    task_id: int
+def get_task_variables(  # TODO
+        task_id: int
 ) -> flask.wrappers.Response:
     pass
 
-def get_task_identity_links( #TODO
-    task_id: int
+
+def get_task_identity_links(  # TODO
+        task_id: int
 ) -> flask.wrappers.Response:
     pass
 
+
 def submit_task(
-    task_id: str,
-body: dict[str, Any],
+        task_id: str,
+        body: dict[str, Any],
 ) -> flask.wrappers.Response:
     task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
     if task_model is None:
@@ -239,9 +227,6 @@ def submit_task(
         return make_response(jsonify(response_item), 200)
 
 
-
-
-
 def _format_response(human_tasks):
     response = []
 
@@ -249,14 +234,14 @@ def _format_response(human_tasks):
     for task in human_tasks.items:
         task_data = {
             "_links": {
-                # pass empty _links as spiff doesn't support HATEOAS
+                # Empty _links as spiff doesn't support HATEOAS
             },
             "_embedded": {
                 "candidateGroups": [
                     {
                         "_links": {
                             "group": {
-                                "href": f"/group/{task.group_identifier_column}"
+                                "href": f"/group/{task.assigned_user_group_identifier}"
                             },
                             "task": {
                                 "href": f"/task/{task.HumanTaskModel.id}"
@@ -264,16 +249,18 @@ def _format_response(human_tasks):
                         },
                         "_embedded": None,
                         "type": "candidate",
-                        "userId": None,  # TODO Find User ID
-                        "groupId": task.group_identifier_column,
+                        "userId": None,
+                        "groupId": task.assigned_user_group_identifier,
                         "taskId": task.HumanTaskModel.id
                     }
                 ],
-                "variable": []  # TODO Retrieve from the task data
+                "variable": [
+                    task.HumanTaskModel.task_model.get_data()  # TODO adjust to match with Camunda response
+                ]
             },
             "id": task.HumanTaskModel.task_guid,
-            "name": task.HumanTaskModel.task_name,
-            "assignee": task.user_username_column,
+            "name": task.HumanTaskModel.task_title,
+            "assignee": task.process_initiator_username,
             "created": datetime.utcfromtimestamp(task.HumanTaskModel.created_at_in_seconds).isoformat() + 'Z',
             "due": None,  # TODO
             "followUp": None,  # TODO
@@ -282,7 +269,7 @@ def _format_response(human_tasks):
             "executionId": task.HumanTaskModel.process_instance_id,
             "owner": None,
             "parentTaskId": None,
-            "priority": 50, #TODO
+            "priority": 50,  # TODO: Dynamically set this value
             "processDefinitionId": task.ProcessModelInfo.process_id,
             "processInstanceId": task.HumanTaskModel.process_instance_id,
             "taskDefinitionKey": task.HumanTaskModel.task_id,
@@ -292,46 +279,48 @@ def _format_response(human_tasks):
             "suspended": False,
             "formKey": None,
             "camundaFormRef": None,
-            "tenantId": None  # TODO
+            "tenantId": None
         }
 
         tasks.append(task_data)
 
-    assignees = [
-        {
-            "_links": {
-                "self": {
-                    "href": f"/user/{task.user_username_column}"
-                }
-            },
-            "_embedded": None,
-            "id": task.user_username_column,
-            "firstName": task.user_displayname_column,  # Replace with actual data
-            "lastName": "",  # Replace with actual data
-            "email": task.user_email_column  # Replace with actual data
-        }
-        for task in human_tasks.items
-    ]
-
-    process_definitions = [
-        {
-            "_links": {},
-            "_embedded": None,
-            "id": task.ProcessModelInfo.id,
-            "key": task.ProcessModelInfo.process_id,  # Replace with actual data
-            "category": "http://bpmn.io/schema/bpmn",
-            "description": task.ProcessModelInfo.description,
-            "name": task.ProcessModelInfo.display_name,
-            "versionTag": "1",  # TODO Replace with actual version if available
-            "version": 1,  # TODO Replace with actual version if available
-            "resource": f"{task.ProcessModelInfo.display_name}.bpmn",
-            "deploymentId": task.ProcessModelInfo.id,
-            "diagram": None,
-            "suspended": False,
-            "contextPath": None
-        }
-        for task in human_tasks.items
-    ]
+    # Remove duplicates from the assignees list based on unique username
+    assignees = list({
+        task.process_initiator_username: {
+            "_links": {
+                "self": {
+                    "href": f"/user/{task.process_initiator_username}"
+                }
+            },
+            "_embedded": None,
+            "id": task.process_initiator_username,
+            "firstName": task.process_initiator_firstname,
+            "lastName": "",  # Replace with actual data if available
+            "email": task.process_initiator_email
+        }
+        for task in human_tasks.items
+    }.values())
+
+    # Remove duplicates from processDefinition list based on unique process ID
+    process_definitions = list({
+        task.ProcessModelInfo.id: {
+            "_links": {},
+            "_embedded": None,
+            "id": task.ProcessModelInfo.id,
+            "key": task.ProcessModelInfo.process_id,
+            "category": "http://bpmn.io/schema/bpmn",
+            "description": task.ProcessModelInfo.description,
+            "name": task.ProcessModelInfo.display_name,
+            "versionTag": "1",  # TODO Replace with actual version if available
+            "version": 1,  # TODO Replace with actual version if available
+            "resource": f"{task.ProcessModelInfo.display_name}.bpmn",
+            "deploymentId": task.ProcessModelInfo.id,
+            "diagram": None,
+            "suspended": False,
+            "contextPath": None
+        }
+        for task in human_tasks.items
+    }.values())
 
     response.append({
         "_links": {},
@@ -343,7 +332,7 @@ def _format_response(human_tasks):
         "count": human_tasks.total
     })
 
-    response.append({  # TODO Add additional information
+    response.append({  # Additional information about variables
         "variables": [
             {
                 "name": "formName",
@@ -377,9 +366,10 @@ def format_human_task_response(human_task: HumanTaskModel, user_model: UserModel
         "id": human_task.task_guid,
         "name": human_task.task_title or human_task.task_name,
         "assignee": user_model.username,
-        "created": datetime.utcfromtimestamp(human_task.created_at_in_seconds).isoformat() + "Z" if human_task.created_at_in_seconds else None,
-        "due": None,  #TODO
-        "followUp": None,  #TODO
+        "created": datetime.utcfromtimestamp(
+            human_task.created_at_in_seconds).isoformat() + "Z" if human_task.created_at_in_seconds else None,
+        "due": None,  # TODO
+        "followUp": None,  # TODO
         "description": human_task.task_name,  # Assuming task_name serves as the description
         "parentTaskId": None,  # No clear parent task id field in the model
         "priority": 50,  # Default to 50 since there's no priority field in the model
@@ -388,4 +378,3 @@ def format_human_task_response(human_task: HumanTaskModel, user_model: UserModel
         "taskDefinitionKey": human_task.task_id,  # Mapping taskDefinitionKey to task_id
         "tenantId": None  # TODO
     }
-

From b5508802647b866f64ece3a7ee4613d879e64e75 Mon Sep 17 00:00:00 2001
From: Sumesh Punakkal Kariyil <sumesh.pk@aot-technologies.com>
Date: Fri, 25 Oct 2024 09:38:50 -0700
Subject: [PATCH 08/10] Adding new endpoints

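Adds an Alembic data migration that maps formsflow.ai roles (camunda-admin,
view_tasks, create_submissions, view_designs, create_designs and related
groups) to permission targets, plus Camunda-style endpoints for task
variables, identity links, and filter counts.

Illustrative request body for POST /task-filters/count (shape inferred from
filter_tasks_count and build_human_tasks_query, not a documented contract):

    [
      {
        "id": 1,
        "name": "Group tasks",
        "criteria": {
          "candidateGroupsExpression": "${currentUserGroups()}",
          "sorting": [{"sortBy": "created", "sortOrder": "desc"}]
        }
      }
    ]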
---
 .../f31dfbe97509_formsflow_role_mappings.py   | 115 ++++++++++
 .../src/spiffworkflow_backend/api.yml         |  75 +++++--
 .../routes/ff_tasks_controller.py             | 196 ++++++++++++------
 .../routes/tasks_controller.py                |  15 +-
 .../services/authorization_service.py         |  10 +-
 .../services/user_service.py                  |   3 +-
 6 files changed, 331 insertions(+), 83 deletions(-)
 create mode 100644 spiffworkflow-backend/migrations/versions/f31dfbe97509_formsflow_role_mappings.py

diff --git a/spiffworkflow-backend/migrations/versions/f31dfbe97509_formsflow_role_mappings.py b/spiffworkflow-backend/migrations/versions/f31dfbe97509_formsflow_role_mappings.py
new file mode 100644
index 000000000..4da0a1996
--- /dev/null
+++ b/spiffworkflow-backend/migrations/versions/f31dfbe97509_formsflow_role_mappings.py
@@ -0,0 +1,115 @@
+"""formsflow_role_mappings
+
+Revision ID: f31dfbe97509
+Revises: da22d9039670
+Create Date: 2024-10-23 13:56:41.952250
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = 'f31dfbe97509'
+down_revision = 'da22d9039670'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Insert permission targets and groups if they do not already exist
+    permission_targets = {}
+    groups = {}
+    for target_uri in ['/*', '/task-filters', '/task-filters/*', '/task/*', '/key/*', '/process-definition/*',
+                       '/process-definition', '/deployment/create']:
+        permission_target_id = op.get_bind().execute(sa.text(f"""
+                INSERT INTO permission_target (uri)
+                SELECT '{target_uri}' 
+                WHERE NOT EXISTS (SELECT id FROM permission_target WHERE uri = '{target_uri}')
+                RETURNING id;
+            """)).fetchone()
+
+        if permission_target_id is None:
+            permission_target_id = op.get_bind().execute(sa.text(f"""
+                    SELECT id FROM permission_target WHERE uri = '{target_uri}';
+                """)).fetchone()
+
+        permission_targets[target_uri] = permission_target_id[0]
+
+    for group in ['camunda-admin', 'view_filters', 'view_tasks', 'manage_tasks', 'create_submissions', 'view_designs',
+                  'create_designs']:
+        group_id = op.get_bind().execute(sa.text(f"""
+                        INSERT INTO "group" (identifier)
+                        SELECT '{group}' 
+                        WHERE NOT EXISTS (SELECT id FROM "group" WHERE identifier = '{group}')
+                        RETURNING id;
+                    """)).fetchone()
+
+        if group_id is None:
+            group_id = op.get_bind().execute(sa.text(f"""
+                            SELECT id FROM "group" WHERE identifier = '{group}';
+                        """)).fetchone()
+        group_id = group_id[0]
+        groups[group] = {"id": group_id}
+
+        # Insert into the principal table
+        principal_id = op.get_bind().execute(sa.text(f"""
+                                INSERT INTO principal (group_id)
+                                SELECT {group_id}
+                                WHERE NOT EXISTS (SELECT id FROM principal WHERE group_id = :group_id)
+                                RETURNING id;
+                            """), {'group_id': group_id}).fetchone()
+
+        if principal_id is None:
+            principal_id = op.get_bind().execute(sa.text("""
+                                    SELECT id FROM principal WHERE group_id = :group_id
+                                """), {'group_id': group_id}).fetchone()
+        groups[group].update({"principal_id": principal_id[0]})
+
+    # Insert into permission_assignment
+    for permission_target_uri in permission_targets.keys():
+        if permission_target_uri == '/*':
+            # Grant 'read' and 'create' to camunda-admin
+            for grant_type in ['read', 'create']:
+                principal_id = groups['camunda-admin'].get("principal_id")
+                _insert_into_permission_assignment(grant_type, permission_targets[permission_target_uri], principal_id)
+
+        elif permission_target_uri in ["/task-filters", "/task-filters/*", "/task/*"]:
+            # Grant 'read' and 'create' to view_tasks
+            for grant_type in ['read', 'create']:
+                principal_id = groups['view_tasks'].get("principal_id")
+                _insert_into_permission_assignment(grant_type, permission_targets[permission_target_uri], principal_id)
+        elif permission_target_uri in ["/key/*"]:
+            # Grant 'read' and 'create' to create_submissions
+            for grant_type in ['read', 'create']:
+                principal_id = groups['create_submissions'].get("principal_id")
+                _insert_into_permission_assignment(grant_type, permission_targets[permission_target_uri], principal_id)
+        elif permission_target_uri in ["/process-definition/*", "/process-definition"]:
+            # Grant 'read' and 'create' to view_designs
+            for grant_type in ['read', 'create']:
+                principal_id = groups['view_designs'].get("principal_id")
+                _insert_into_permission_assignment(grant_type, permission_targets[permission_target_uri], principal_id)
+        elif permission_target_uri in ["/deployment/create"]:
+            # Grant 'read' and 'create' to create_designs
+            for grant_type in ['read', 'create']:
+                principal_id = groups['create_designs'].get("principal_id")
+                _insert_into_permission_assignment(grant_type, permission_targets[permission_target_uri], principal_id)
+
+
+def _insert_into_permission_assignment(grant_type, permission_target_id, principal_id):
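+    """Grant `permission` ('read' or 'create' above) on a permission target to a principal.
+
+    Inserts a 'permit' permission_assignment row only if an identical row does not already exist.
+    """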
+    permission_assignment = op.get_bind().execute(sa.text(f"""
+                                            SELECT id FROM permission_assignment 
+                                            WHERE principal_id = {principal_id} 
+                                            AND permission_target_id = {permission_target_id}
+                                            AND grant_type = 'permit'
+                                            AND permission = '{grant_type}';
+                                        """)).fetchone()
+    if not permission_assignment:
+        op.get_bind().execute(sa.text(f"""INSERT INTO permission_assignment 
+                                                (principal_id, permission_target_id, grant_type, permission)
+                                                VALUES ({principal_id}, {permission_target_id}, 'permit', '{grant_type}');
+                                            """))
+
+
+def downgrade():
+    pass
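+    # Left as a no-op: reversing this migration would require deleting the
+    # permission_assignment, permission_target and "group" rows inserted above,
+    # some of which may pre-date this migration, so they are not removed blindly.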
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index 53617ce7d..26ac60071 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -3459,21 +3459,28 @@ paths:
                               type: string
                     _embedded:
                       type: object
-#                      properties:
-#                        assignee:
-#                          type: array
-#                          items:
-#                            $ref: '#/components/schemas/FfUser'
-#                        processDefinition:
-#                          type: array
-#                          items:
-#                            $ref: '#/components/schemas/ProcessDefinition'
-#                        task:
-#                          type: array
-#                          items:
-#                            $ref: '#/components/schemas/FfTask'
                     count:
                       type: integer
+  /task-filters/count:
+    post:
+      summary: Retrieve task counts for a list of filter criteria
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.filter_tasks_count
+      requestBody:
+        content:
+          application/json:
+            schema:
+              type: array
+              items:
+                type: object
+      responses:
+        "200":
+          description: Successfully retrieved filtered tasks count
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: object
+                  properties:
+                    name:
+                      type: string
+                    count:
+                      type: integer
+                    id:
+                      type: integer
   /task/{task_id}:
     parameters:
       - name: task_id
@@ -3495,6 +3502,48 @@ paths:
               schema:
                 $ref: "#/components/schemas/Task"
 
+  /task/{task_id}/variables:
+    parameters:
+      - name: task_id
+        in: path
+        required: true
+        description: "The guid of an existing task."
+        schema:
+          type: string
+    get:
+      tags:
+        - Tasks
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.get_task_variables_by_id
+      summary: "Get task variables"
+      responses:
+        "200":
+          description: "Variables for the task"
+          content:
+            application/json:
+              schema:
+                type: object
+
+  /task/{task_id}/identity-links:
+    parameters:
+      - name: task_id
+        in: path
+        required: true
+        description: "The guid of an existing task."
+        schema:
+          type: string
+    get:
+      tags:
+        - Tasks
+      operationId: spiffworkflow_backend.routes.ff_tasks_controller.get_task_identity_links_by_id
+      summary: "Get identity links"
+      responses:
+        "200":
+          description: "Identity links for the task"
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: object
+
 
   /task/{task_id}/claim:
     post:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
index 4501754bc..f2755ad83 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
@@ -1,7 +1,7 @@
 from typing import Any
 import time
 from datetime import datetime
-from typing import Any
+from typing import Any, Dict
 from sqlalchemy import and_, asc, desc, cast
 from sqlalchemy.types import String
 
@@ -35,12 +35,63 @@
 from .tasks_controller import task_assign
 
 
-def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100) -> flask.wrappers.Response:
-    """Filter tasks and return the list."""
+def filter_tasks(body: Dict, firstResult: int = 1, maxResults: int = 100) -> flask.wrappers.Response:
+    """Filter tasks and return the list and count."""
     if not body or body.get('criteria') is None:
         return None
     user_model: UserModel = g.user
 
+    human_tasks_query = build_human_tasks_query(body, user_model)
+
+    # Sorting logic
+    sorting_criteria = body.get('criteria', {}).get('sorting', [])
+    if sorting_criteria:
+        for sort_item in sorting_criteria:
+            sort_by = sort_item.get('sortBy')
+            sort_order = sort_item.get('sortOrder', 'asc')  # Default to ascending if not provided
+
+            if sort_by == 'created':
+                human_tasks_query = human_tasks_query.order_by(
+                    asc(HumanTaskModel.id),
+                    asc(HumanTaskModel.created_at_in_seconds) if sort_order == 'asc' else desc(HumanTaskModel.created_at_in_seconds)
+                )
+            elif sort_by == 'title':
+                human_tasks_query = human_tasks_query.order_by(
+                    asc(HumanTaskModel.id),
+                    asc(HumanTaskModel.task_title) if sort_order == 'asc' else desc(HumanTaskModel.task_title)
+                )
+
+    else:
+        human_tasks_query = human_tasks_query.order_by(desc(HumanTaskModel.id))  # Order by task ID
+
+    # Paginate results for task retrieval
+    human_tasks = human_tasks_query.paginate(page=firstResult, per_page=maxResults, error_out=False)
+
+    return _format_response(human_tasks)
+
+def filter_tasks_count(body: Dict) -> flask.wrappers.Response:
+    """Filter tasks and return only the count."""
+    user_model: UserModel = g.user
+    response = []
+
+    for criteria in body:
+        if not criteria or criteria.get('criteria') is None:
+            return None
+
+        human_tasks_query = build_human_tasks_query(criteria, user_model)
+
+        # Get the total count of tasks
+        task_count = human_tasks_query.count()
+        response.append({
+            "name": criteria.get("name"),
+            "count": task_count,
+            "id": criteria.get("id")
+        })
+
+    return response
+
+def build_human_tasks_query(body: Dict, user_model: UserModel):
+    """Build the base query for filtering tasks."""
     human_tasks_query = (
         db.session.query(
             HumanTaskModel, ProcessInstanceModel.id, ProcessModelInfo,
@@ -53,8 +104,7 @@ def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100) -> fla
             HumanTaskModel.id,  # Group by the ID of the human task
             ProcessInstanceModel.id,  # Add the process instance ID to the GROUP BY clause
             ProcessModelInfo.process_id,
-            # GroupModel.identifier
-        )  # type: ignore
+        )
         .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
         .join(ProcessInstanceModel)
         .join(ProcessModelInfo, ProcessModelInfo.id == ProcessInstanceModel.process_model_identifier)
@@ -71,7 +121,7 @@ def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100) -> fla
         )
     )
 
-    # Join through HumanTaskUserModel to associate users to tasks
+    # Apply filters based on body criteria
     if body.get('criteria').get('candidateGroupsExpression') == '${currentUserGroups()}':
         human_tasks_query = human_tasks_query.filter(
             GroupModel.identifier.in_([group.identifier for group in user_model.groups]))
@@ -86,10 +136,8 @@ def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100) -> fla
         human_tasks_query = human_tasks_query.filter(UserModel.username == user_model.username)
     if assignee := body.get('criteria').get('assignee'):
         human_tasks_query = human_tasks_query.filter(UserModel.username == assignee)
-    if assignee := body.get('criteria').get('assignee'):
-        human_tasks_query = human_tasks_query.filter(UserModel.username == assignee)
 
-    #  Filtering by process variables
+    # Filtering by process variables
     process_variables = body.get('criteria', {}).get('processVariables', [])
     if process_variables:
         for variable in process_variables:
@@ -98,44 +146,67 @@ def filter_tasks(body: dict, firstResult: int = 1, maxResults: int = 100) -> fla
             json_field = JsonDataModel.data['data'].op('->>')(var_name)
             human_tasks_query = human_tasks_query.filter(cast(json_field, String) == var_value)
 
-    # Sorting logic
-    sorting_criteria = body.get('criteria', {}).get('sorting', [])
-    if sorting_criteria:
-        for sort_item in sorting_criteria:
-            sort_by = sort_item.get('sortBy')
-            sort_order = sort_item.get('sortOrder', 'asc')  # Default to ascending if not provided
+    return human_tasks_query
 
-            if sort_by == 'created':
-                human_tasks_query = human_tasks_query.order_by(
-                    asc(HumanTaskModel.id),
-                    asc(HumanTaskModel.created_at_in_seconds) if sort_order == 'asc' else desc(HumanTaskModel.created_at_in_seconds)
-                )
-            elif sort_by == 'title':
-                human_tasks_query = human_tasks_query.order_by(
-                    asc(HumanTaskModel.id),
-                    asc(HumanTaskModel.task_title) if sort_order == 'asc' else desc(HumanTaskModel.task_title)
-                )
 
-    else:
-        human_tasks_query = human_tasks_query.order_by(desc(HumanTaskModel.id))  # Order by task ID
+def get_task_variables_by_id(
+        task_id: str
+) -> flask.wrappers.Response:
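+    """Return task variables keyed by name, in a Camunda-style map.
+
+    Illustrative response shape (variable names depend on the task's data):
+        {"applicant": {"type": "String", "value": "jdoe"}}
+    """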
+    current_app.logger.debug("get_task_variables_by_id --->%s", task_id)
 
-    current_app.logger.info("human_tasks_query --->")
-    current_app.logger.info(human_tasks_query)
+    task: TaskModel | None = db.session.query(TaskModel).filter(TaskModel.guid == task_id).one_or_none()
 
-    human_tasks = human_tasks_query.paginate(page=firstResult, per_page=maxResults, error_out=False)
+    # If the task is not found, raise an error
+    if not task:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with id '{task_id}'",
+            status_code=400,
+        )
+    response = {}
+    task_data = task.get_data().get("data", {})
+    for key in task_data:
+        response[key] = {
+            "type": "String",  # TODO: derive the actual variable type
+            "value": task_data.get(key)
+        }
+    return response
 
-    return _format_response(human_tasks)
+def get_task_identity_links_by_id(
+        task_id: str
+) -> flask.wrappers.Response:
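+    """Return Camunda-style identity links for a task.
+
+    Illustrative response (values depend on the task's lane and assignment):
+        [{"userId": "jdoe", "groupId": "reviewers", "type": "candidate"}]
+    """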
+    current_app.logger.debug("get_task_identity_links_by_id --->%s", task_id)
+    task_query = (
+        db.session.query(HumanTaskModel, UserModel, GroupModel)
+        .outerjoin(HumanTaskUserModel, and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+                                       HumanTaskUserModel.ended_at_in_seconds == None))
+        .outerjoin(UserModel, HumanTaskUserModel.user_id == UserModel.id)  # Join with UserModel to get user details
+        .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
+        .filter(HumanTaskModel.task_guid == task_id)
+    )
+    tasks = task_query.all()
+    if not tasks:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with id '{task_id}'",
+            status_code=400,
+        )
+
+    human_task, user, group = tasks[0]
+    response = [{
+        "userId": user.username if user else None,
+        "groupId": group.identifier if group else None,
+        "type": "candidate"
+    }]
+
+    return response
 
 def get_task_by_id(
         task_id: str
 ) -> flask.wrappers.Response:
-    # Query to join HumanTaskModel with HumanTaskUserModel
+    current_app.logger.debug("get_task_by_id --->%s", task_id)
+
     task_query = (
         db.session.query(HumanTaskModel, HumanTaskUserModel, UserModel)
-        .join(HumanTaskUserModel, and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+        .outerjoin(HumanTaskUserModel, and_(HumanTaskModel.id == HumanTaskUserModel.human_task_id,
                                        HumanTaskUserModel.ended_at_in_seconds == None))
-        .join(UserModel, HumanTaskUserModel.user_id == UserModel.id)  # Join with UserModel to get user details
+        .outerjoin(UserModel, HumanTaskUserModel.user_id == UserModel.id)  # Join with UserModel to get user details
         .filter(HumanTaskModel.task_guid == task_id)
     )
 
@@ -148,7 +219,7 @@ def get_task_by_id(
             message=f"Cannot find a task with id '{task_id}'",
             status_code=400,
         )
-    if not len(tasks) > 1:
+    if len(tasks) > 1:
         raise ApiError(
             error_code="more_than_one_task_found",
             message=f"More tasks found for '{task_id}'",
@@ -158,29 +229,37 @@ def get_task_by_id(
     return make_response(jsonify(format_human_task_response(human_task, user_model)), 200)
 
 
 def claim_task(
         task_id: str,
-        body: dict[str, Any],
+        body: Dict[str, Any],
 ) -> flask.wrappers.Response:
-    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
+    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(task_guid=task_id).one_or_none()
     if task_model is None:
         raise ApiError(
             error_code="task_not_found",
             message=f"Cannot find a task with id '{task_id}'",
             status_code=400,
         )
+    user_model: UserModel = UserModel.query.filter_by(username=body.get("userId")).one_or_none()
+    if user_model is None:  # TODO: decide whether to create a placeholder user in this case.
+        raise ApiError(
+            error_code="user_not_found",
+            message=f"Cannot find a user with id '{body.get('userId')}'",
+            status_code=400,
+        )
 
     task_assign(modified_process_model_identifier=None, process_instance_id=task_model.process_instance_id,
-                task_guid=task_model.task_guid, body={'user_ids': [body.get("userId")]})
+                task_guid=task_model.task_guid, body={'user_ids': [user_model.id]})
 
-    return make_response(jsonify(format_human_task_response(task_model)), 200)
+    return {}
 
 
 def unclaim_task(
         task_id: str,
-        body: dict[str, Any],
+        body: Dict[str, Any],
 ) -> flask.wrappers.Response:
-    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
+    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(task_guid=task_id).one_or_none()
     if task_model is None:
         raise ApiError(
             error_code="task_not_found",
@@ -198,23 +277,11 @@ def unclaim_task(
     return make_response(jsonify({"ok": True}), 200)
 
 
-def get_task_variables(  # TODO
-        task_id: int
-) -> flask.wrappers.Response:
-    pass
-
-
-def get_task_identity_links(  # TODO
-        task_id: int
-) -> flask.wrappers.Response:
-    pass
-
-
 def submit_task(
         task_id: str,
-        body: dict[str, Any],
+        body: Dict[str, Any],
 ) -> flask.wrappers.Response:
-    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(id=task_id).one_or_none()
+    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(task_guid=task_id).one_or_none()
     if task_model is None:
         raise ApiError(
             error_code="task_not_found",
@@ -222,11 +289,24 @@ def submit_task(
             status_code=400,
         )
     # TODO Manage task variables submitted.
+
     with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
         response_item = _task_submit_shared(task_model.process_instance_id, task_model.task_guid, body)
         return make_response(jsonify(response_item), 200)
 
 
+def _format_task_variables(task_data: Dict):
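+    """Convert a Spiff task-data dict into a Camunda-style variable list.
+
+    Illustrative mapping: {"data": {"amount": 100}} -> [{"name": "amount", "value": 100, "type": "String"}]
+    """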
+    variables = []
+    for key in task_data.get("data", {}):
+        variables.append({
+            "name": key,
+            "value": task_data.get("data")[key],
+            "type": "String"  # TODO: derive the type dynamically from the element value
+        })
+    return variables
+
+
 def _format_response(human_tasks):
     response = []
 
@@ -254,9 +334,7 @@ def _format_response(human_tasks):
                         "taskId": task.HumanTaskModel.id
                     }
                 ],
-                "variable": [
-                    task.HumanTaskModel.task_model.get_data()  # TODO adjust to match with Camunda response
-                ]
+                "variable": _format_task_variables(task.HumanTaskModel.task_model.get_data())
             },
             "id": task.HumanTaskModel.task_guid,
             "name": task.HumanTaskModel.task_title,
@@ -365,7 +443,7 @@ def format_human_task_response(human_task: HumanTaskModel, user_model: UserModel
     return {
         "id": human_task.task_guid,
         "name": human_task.task_title or human_task.task_name,
-        "assignee": user_model.username,
+        "assignee": user_model.username if user_model else None,
         "created": datetime.utcfromtimestamp(
             human_task.created_at_in_seconds).isoformat() + "Z" if human_task.created_at_in_seconds else None,
         "due": None,  # TODO
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
index dd74c81da..2a893ce9c 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -348,12 +348,12 @@ def task_assign(
 ) -> Response:
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
 
-    if process_instance.status != ProcessInstanceStatus.suspended.value:
-        raise ApiError(
-            error_code="error_not_suspended",
-            message="The process instance must be suspended to perform this operation",
-            status_code=400,
-        )
+    # if process_instance.status != ProcessInstanceStatus.suspended.value:
+    #     raise ApiError(
+    #         error_code="error_not_suspended",
+    #         message="The process instance must be suspended to perform this operation",
+    #         status_code=400,
+    #     )
 
     if "user_ids" not in body:
         raise ApiError(
@@ -387,6 +387,9 @@ def task_assign(
         if human_task_user is None:
             human_task_user = HumanTaskUserModel(user_id=user_id, human_task=human_task, created_at_in_seconds=round(time.time()))
             db.session.add(human_task_user)
+        else:
+            # TODO: workaround for the unique constraint; if the task was already
+            # allocated to this user, just clear ended_at_in_seconds.
+            human_task_user.ended_at_in_seconds = None
 
     SpiffworkflowBaseDBModel.commit_with_rollback_on_exception()
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
index 3b11a1465..e7ffd31b1 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
@@ -105,7 +105,7 @@ def has_permission(cls, principals: list[PrincipalModel], permission: str, targe
             .filter(
                 or_(
                     # found from https://stackoverflow.com/a/46783555
-                    literal(target_uri_normalized).like(PermissionTargetModel.uri),
+                    literal(target_uri_normalized).like(func.REPLACE(PermissionTargetModel.uri, '*', '%')),
                     # to check for exact matches as well
                     # see test_user_can_access_base_path_when_given_wildcard_permission unit test
                     func.REPLACE(func.REPLACE(PermissionTargetModel.uri, "/%", ""), ":%", "") == target_uri_normalized,
@@ -476,6 +476,8 @@ def create_user_from_sign_in(cls, user_info: dict) -> UserModel:
             if "groups" in user_info:
                 desired_group_identifiers = user_info["groups"]
                 desired_group_identifiers = [desired_group_identifier.lstrip("/") for desired_group_identifier in desired_group_identifiers]
+            if "role" in user_info:
+                desired_group_identifiers += user_info.get('role', [])
 
         for field_index, tenant_specific_field in enumerate(
             current_app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"]
@@ -527,9 +529,9 @@ def create_user_from_sign_in(cls, user_info: dict) -> UserModel:
         # before the user signs in, because we won't know things like
         # the external service user identifier.
         cls.import_permissions_from_yaml_file(user_model)
-
-        if is_new_user:
-            UserService.add_user_to_human_tasks_if_appropriate(user_model)
+        # Commenting this out as we don't want to automatically assign tasks to users
+        # if is_new_user:
+        #     UserService.add_user_to_human_tasks_if_appropriate(user_model)
 
         # this cannot be None so ignore mypy
         return user_model  # type: ignore
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
index 169c1d101..05058ccdf 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
@@ -147,7 +147,8 @@ def sync_user_with_token(cls, token_info, user_model):
         if not token_info or not user_model:
             return
         # Create group if it doesn't exist
-        token_groups = token_info.get('groups') or token_info.get('roles')
+        # TODO Remove groups not present in the token
+        token_groups = token_info.get('groups', []) + token_info.get('role', [])
         for token_group in token_groups:
             token_group = token_group.lstrip("/")
             group: GroupModel = GroupModel.query.filter_by(identifier=token_group).one_or_none()

From d07714db7c1c563995b480fe722790d7bba8d2db Mon Sep 17 00:00:00 2001
From: Sumesh Punakkal Kariyil <sumesh.pk@aot-technologies.com>
Date: Fri, 25 Oct 2024 14:50:20 -0700
Subject: [PATCH 09/10] Changes for submit task

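Persists variables submitted with a task before completing it: the submitted
values are merged into the task's JSON data (stored under a new json_data
hash) and the task is then run through the shared submit path. Illustrative
payload (shape inferred from the loop over body["variables"]; only "value" is
used, the "type" field is currently ignored):

    {"variables": {"approved": {"value": true, "type": "Boolean"}}}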
---
 .../routes/ff_tasks_controller.py             | 23 +++++++++++++++----
 .../routes/process_instances_controller.py    | 23 -------------------
 .../services/user_service.py                  |  9 +++++++-
 3 files changed, 26 insertions(+), 29 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
index f2755ad83..06c376ea8 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
@@ -1,9 +1,12 @@
 from typing import Any
 import time
+import json
+import copy
 from datetime import datetime
 from typing import Any, Dict
 from sqlalchemy import and_, asc, desc, cast
 from sqlalchemy.types import String
+from hashlib import sha256
 
 import flask.wrappers
 import sentry_sdk
@@ -281,17 +284,27 @@ def submit_task(
         task_id: str,
         body: Dict[str, Any],
 ) -> flask.wrappers.Response:
-    task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(task_guid=task_id).one_or_none()
-    if task_model is None:
+    human_task_model: HumanTaskModel | None = HumanTaskModel.query.filter_by(task_guid=task_id).one_or_none()
+    if human_task_model is None:
         raise ApiError(
             error_code="task_not_found",
             message=f"Cannot find a task with id '{task_id}'",
             status_code=400,
         )
-    # TODO Manage task variables submitted.
-
+    # Update the task data with the submitted variables, then submit the task.
+    task_model: TaskModel = TaskModel.query.filter_by(guid=task_id).one_or_none()
+    data = copy.deepcopy(task_model.get_data())
+    for var, submitted in (body.get("variables") or {}).items():
+        data["data"][var] = submitted["value"]
+
+    json_data_hash = sha256(json.dumps(data).encode("utf8")).hexdigest()
+    json_data_model = JsonDataModel(hash=json_data_hash, data=data)
+    db.session.add(json_data_model)
+    db.session.flush()
+    task_model.json_data_hash = json_data_hash
     with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
-        response_item = _task_submit_shared(task_model.process_instance_id, task_model.task_guid, body)
+        response_item = _task_submit_shared(task_model.process_instance_id, task_model.guid, body)
         return make_response(jsonify(response_item), 200)
 
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 50ea38987..2dafac976 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -82,29 +82,6 @@ def process_instance_start(
     current_app.logger.info("running the instance")
     process_instance_response = process_instance_run(process_model_identifier, process_instance.id, force_run, execution_mode)
 
-    # Create a dummy task to hold the process instance data
-    blank_json = json.dumps({})
-    blank_json_data_hash = sha256(blank_json.encode("utf8")).hexdigest()
-    json_data_hash = sha256(json.dumps(body).encode("utf8")).hexdigest()
-    # Find the task definition for the start event and use it
-    print("process_instance.bpmn_process_definition_id ", process_instance.bpmn_process_definition_id)
-    task_def_model: TaskDefinitionModel = TaskDefinitionModel.query.filter_by(typename='StartEvent',
-                                                                              bpmn_process_definition_id=process_instance.bpmn_process_definition_id).first()
-
-    TaskModel(
-        guid=uuid.uuid4(),
-        bpmn_process_id=process_instance.bpmn_process_id,
-        process_instance_id=process_instance.id,
-        task_definition_id=task_def_model.id,
-        state='COMPLETED',
-        properties_json={},
-        start_in_seconds=time.time(),
-        end_in_seconds=time.time(),
-        json_data_hash=json_data_hash,
-        python_env_data_hash=blank_json_data_hash,
-        data=body
-    )
-
     return process_instance_response
 
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
index 05058ccdf..0266e7266 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py
@@ -147,7 +147,7 @@ def sync_user_with_token(cls, token_info, user_model):
         if not token_info or not user_model:
             return
         # Create group if it doesn't exist
-        # TODO Remove groups not present in the token
+        current_user_group_assignments = []
         token_groups = token_info.get('groups', []) + token_info.get('role', [])
         for token_group in token_groups:
             token_group = token_group.lstrip("/")
@@ -166,6 +166,13 @@ def sync_user_with_token(cls, token_info, user_model):
             if not principal:
                 principal = PrincipalModel(group_id=group.id)
                 db.session.add(principal)
+            current_user_group_assignments.append(group.id)
+        # Now query and delete all user assignments which are not in current_user_group_assignments for this user.
+        db.session.query(UserGroupAssignmentModel).filter(
+            UserGroupAssignmentModel.user_id == user_model.id,
+            UserGroupAssignmentModel.group_id.notin_(current_user_group_assignments)
+        ).delete(synchronize_session='fetch')
+
         db.session.commit()
 
     @classmethod

From cf5cd281fcf35b17f21c3b712499cf65e199a5d8 Mon Sep 17 00:00:00 2001
From: Sumesh Punakkal Kariyil <sumesh.pk@aot-technologies.com>
Date: Tue, 19 Nov 2024 12:05:26 -0800
Subject: [PATCH 10/10] Changes for tasks count

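Treats firstResult as a zero-based offset (as in the Camunda REST API) and
converts it to a 1-based page for SQLAlchemy pagination, e.g. with
maxResults=10: firstResult=0 -> page 1, firstResult=10 -> page 2. Also stops
pre-assigning potential owners when human tasks are created, since
formsflow.ai assigns tasks explicitly via claim.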
---
 .../spiffworkflow_backend/routes/ff_tasks_controller.py   | 3 ++-
 .../src/spiffworkflow_backend/routes/public_controller.py | 1 +
 .../services/process_instance_processor.py                | 8 ++++----
 3 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
index 06c376ea8..e0b421d4b 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/ff_tasks_controller.py
@@ -68,7 +68,8 @@ def filter_tasks(body: Dict, firstResult: int = 1, maxResults: int = 100) -> fla
         human_tasks_query = human_tasks_query.order_by(desc(HumanTaskModel.id))  # Order by task ID
 
     # Paginate results for task retrieval
-    human_tasks = human_tasks_query.paginate(page=firstResult, per_page=maxResults, error_out=False)
+    page = (firstResult // maxResults) + 1
+    human_tasks = human_tasks_query.paginate(page=page, per_page=maxResults, error_out=False)
 
     return _format_response(human_tasks)
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/public_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/public_controller.py
index 25d4d9551..e7a453523 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/public_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/public_controller.py
@@ -253,6 +253,7 @@ def _assign_task_if_guest(task_model: TaskModel) -> bool:
                     status_code=400,
                 )
             )
+        print(f"Assigning TASK: {human_task} to USER: {g.user.id}")
         human_task_user = HumanTaskUserModel(user_id=g.user.id, human_task=human_task)
         db.session.add(human_task_user)
         db.session.commit()
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 37d6cf983..51fbc4738 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -1246,10 +1246,10 @@ def save(self) -> None:
                         lane_assignment_id=potential_owner_hash["lane_assignment_id"],
                     )
                     db.session.add(human_task)
-
-                    for potential_owner_id in potential_owner_hash["potential_owner_ids"]:
-                        human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task)
-                        db.session.add(human_task_user)
+                    # Not needed for formsflow.ai
+                    # for potential_owner_id in potential_owner_hash["potential_owner_ids"]:
+                    #     human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task)
+                    #     db.session.add(human_task_user)
 
         if len(human_tasks) > 0:
             for at in human_tasks: