diff --git a/.github/vale/styles/Viam/AvoidWithSubstitutionViam.yml b/.github/vale/styles/Viam/AvoidWithSubstitutionViam.yml index ae000139e4..e91df6c7ae 100644 --- a/.github/vale/styles/Viam/AvoidWithSubstitutionViam.yml +++ b/.github/vale/styles/Viam/AvoidWithSubstitutionViam.yml @@ -7,7 +7,6 @@ action: name: replace swap: in the website: on the website - web app: Viam app or Viam platform user of an org: member of an org compute parts: Computer microprocessor: Raspberry Pi or Jetson or another specific term diff --git a/.github/workflows/check_python_methods.py b/.github/workflows/check_python_methods.py index bdd53309d3..bdbf624d5a 100644 --- a/.github/workflows/check_python_methods.py +++ b/.github/workflows/check_python_methods.py @@ -235,7 +235,7 @@ def parse(type, names): # Parse the Docs site's service page if args.local: if type == "app" or type == "robot": - with open(f"dist/appendix/apis/{service}/index.html") as fp: + with open(f"dist/dev/reference/apis/{service}/index.html") as fp: soup2 = BeautifulSoup(fp, 'html.parser') else: if service in services_page_mapping.keys(): diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 35e7be67ba..3e817e72c3 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -47,23 +47,17 @@ jobs: name: tutorials-file path: ./public/tutorials/typesense.json - - name: Upload how-tos search file - uses: actions/upload-artifact@v4 - with: - name: howtos-file - path: ./public/how-tos/typesense.json - - name: Upload components models file uses: actions/upload-artifact@v4 with: name: components-models-file - path: ./public/components/typesense.json + path: ./public/operate/reference/components/typesense.json - - name: Upload services models file - uses: actions/upload-artifact@v4 - with: - name: services-models-file - path: ./public/services/typesense.json + # - name: Upload services models file + # uses: actions/upload-artifact@v4 + # with: + # name: services-models-file + # path: 
./public/operate/reference/services/typesense.json deploy: environment: @@ -113,11 +107,6 @@ jobs: with: python-version: "3.9" cache: "pip" # caching pip dependencies - - name: Download the how-tos file - uses: actions/download-artifact@v4 - with: - name: howtos-file - path: "how-tos" - name: Download the tutorials file uses: actions/download-artifact@v4 with: @@ -145,11 +134,11 @@ jobs: with: name: components-models-file path: "components" - - name: Download the services models file - uses: actions/download-artifact@v4 - with: - name: services-models-file - path: "services" + # - name: Download the services models file + # uses: actions/download-artifact@v4 + # with: + # name: services-models-file + # path: "services" - run: pip install -r .github/workflows/requirements.txt - name: "Update modular resources" run: python3 .github/workflows/get_modular_resources.py diff --git a/.github/workflows/get_modular_resources.py b/.github/workflows/get_modular_resources.py index da5736d5ad..74ac79e971 100644 --- a/.github/workflows/get_modular_resources.py +++ b/.github/workflows/get_modular_resources.py @@ -101,19 +101,19 @@ async def main(): print("INSERTED") print(insert_resp) - # Get built-in resources from services/typesense.json - with open('services/typesense.json') as f: - resources = json.load(f) - for r in resources: - print("RESOURCE") - print(r) - r["last_updated"] = time_now - r["total_organization_usage"] = int(r["total_organization_usage"]) - r["total_robot_usage"] = int(r["total_robot_usage"]) - print(r) - insert_resp = typesense_client.collections['resources'].documents.upsert(r) - print("INSERTED") - print(insert_resp) + # # Get built-in resources from services/typesense.json + # with open('services/typesense.json') as f: + # resources = json.load(f) + # for r in resources: + # print("RESOURCE") + # print(r) + # r["last_updated"] = time_now + # r["total_organization_usage"] = int(r["total_organization_usage"]) + # r["total_robot_usage"] = 
int(r["total_robot_usage"]) + # print(r) + # insert_resp = typesense_client.collections['resources'].documents.upsert(r) + # print("INSERTED") + # print(insert_resp) # Create a request to list registry items and get the response from the app request = ListRegistryItemsRequest(organization_id=cloud._organization_id) diff --git a/.github/workflows/update_sdk_methods.py b/.github/workflows/update_sdk_methods.py index 62b9c02446..9f1ae8ea80 100755 --- a/.github/workflows/update_sdk_methods.py +++ b/.github/workflows/update_sdk_methods.py @@ -435,10 +435,10 @@ "SLAM service": "/services/slam/", "frame": "/services/frame-system/", "Viam app": "https://app.viam.com/", - "organization settings page": "/cloud/organizations/", - "image tags": "/fleet/dataset/#image-tags", + "organization settings page": "/manage/reference/organize/", + "image tags": "/data-ai/ai/create-dataset/#label-your-images", "API key": "/fleet/cli/#authenticate", - "board model": "/components/board/#configuration" + "board model": "/dev/reference/apis/components/board/" } ## Language-specific resource name overrides: @@ -617,7 +617,7 @@ def link_data_types(sdk, data_type_string): ## Link matching text, used in write_markdown(): ## NOTE: Currently does not support formatting for link titles -## (EXAMPLE: bolded DATA tab here: https://docs.viam.com/appendix/apis/data-client/#binarydatabyfilter) +## (EXAMPLE: bolded DATA tab here: https://docs.viam.com/dev/reference/apis/data-client/#binarydatabyfilter) def link_description(format_type, full_description, link_text, link_url): ## Supports 'md' link styling or 'html' link styling. 
@@ -1830,23 +1830,23 @@ def write_markdown(type, names, methods): if type == 'component': ## Replace underscores, and convert generic_component to just generic: resource_adjusted = resource.replace('generic_component', 'generic').replace('_','-') - proto_anchor_link = '/appendix/apis/components/' + resource_adjusted + '/#' + proto_link + proto_anchor_link = '/dev/reference/apis/components/' + resource_adjusted + '/#' + proto_link elif type == 'service' and resource in ['base_remote_control', 'motion', 'navigation', 'slam', 'vision']: - proto_anchor_link = '/appendix/apis/services/' + resource.replace('base_remote_control', 'base-rc') + '/#' + proto_link + proto_anchor_link = '/dev/reference/apis/services/' + resource.replace('base_remote_control', 'base-rc') + '/#' + proto_link elif type == 'service' and resource == 'data_manager': - proto_anchor_link = '/appendix/apis/services/data/#' + proto_link + proto_anchor_link = '/dev/reference/apis/services/data/#' + proto_link elif type == 'service' and resource == 'generic_service': - proto_anchor_link = '/appendix/apis/services/generic/#' + proto_link + proto_anchor_link = '/dev/reference/apis/services/generic/#' + proto_link elif type == 'service' and resource == 'mlmodel': - proto_anchor_link = '/appendix/apis/services/ml/#' + proto_link + proto_anchor_link = '/dev/reference/apis/services/ml/#' + proto_link elif type == 'app' and resource == 'app': - proto_anchor_link = '/appendix/apis/fleet/#' + proto_link + proto_anchor_link = '/dev/reference/apis/fleet/#' + proto_link elif type == 'app' and resource in ["billing", "mltraining"]: - proto_anchor_link = '/appendix/apis/' + resource.replace('mltraining','ml-training') + '-client/#' + proto_link + proto_anchor_link = '/dev/reference/apis/' + resource.replace('mltraining','ml-training') + '-client/#' + proto_link elif type == 'app' and resource in ["data", "dataset", "data_sync"]: - proto_anchor_link = '/appendix/apis/data-client/#' + proto_link + proto_anchor_link = 
'/dev/reference/apis/data-client/#' + proto_link elif type == 'robot': - proto_anchor_link = '/appendix/apis/' + resource + '/#' + proto_link + proto_anchor_link = '/dev/reference/apis/' + resource + '/#' + proto_link ## Fetch just the first sentence from the proto_override_file (first text string terminated by '.\n'), ignoring hugo ## shortcodes like alerts ('{{%.*%}}.*{{% \[a-b].* %}}'), which precede some override files' (proto descriptions') diff --git a/.github/workflows/upload_tutorials.py b/.github/workflows/upload_tutorials.py index 814265b363..e9198871b9 100644 --- a/.github/workflows/upload_tutorials.py +++ b/.github/workflows/upload_tutorials.py @@ -31,19 +31,6 @@ async def main(): print("INSERTED") print(insert_resp) - # Get how-tos from how-tos/typesense.json - with open('how-tos/typesense.json') as f: - resources = json.load(f) - for r in resources: - print("RESOURCE") - r["date"] = int(r["date"]) - print(r) - r["last_updated"] = time_now - print(r) - insert_resp = typesense_client.collections['tutorials'].documents.upsert(r) - print("INSERTED") - print(insert_resp) - # Deleting documents that didn't get updated (presumably deleted) try: res = typesense_client.collections['tutorials'].documents.delete({'filter_by': 'last_updated: <' + str(time_now)}) diff --git a/assets/build/program/sdks/log-level-info.png b/assets/build/program/sdks/log-level-info.png index c65ae0d692..3bff97cee3 100644 Binary files a/assets/build/program/sdks/log-level-info.png and b/assets/build/program/sdks/log-level-info.png differ diff --git a/assets/icons/logo.svg b/assets/icons/logo.svg index b1539c8555..0eb8b7c13d 100644 --- a/assets/icons/logo.svg +++ b/assets/icons/logo.svg @@ -3,7 +3,7 @@ @@ -215,8 +213,6 @@ search.addWidgets([ sortBy: ["name:asc"], items: [ { label: "tutorial" }, - { label: "how-to" }, - { label: "quickstart" }, { label: "blogpost" }, { label: "codelab" }, ], diff --git a/assets/registry/restart-module.png b/assets/registry/restart-module.png new file 
mode 100644 index 0000000000..d256bb689d Binary files /dev/null and b/assets/registry/restart-module.png differ diff --git a/assets/scss/_sidebar-tree.scss b/assets/scss/_sidebar-tree.scss index c58f197777..159cf1abe3 100644 --- a/assets/scss/_sidebar-tree.scss +++ b/assets/scss/_sidebar-tree.scss @@ -6,6 +6,7 @@ margin-right: -15px; margin-left: -15px; font-size: 1rem; + font-weight: 300; @include media-breakpoint-up(md) { @supports (position: sticky) { @@ -63,7 +64,7 @@ a, .emptynode { display: inline-block; padding-bottom: 0.375rem; - color: black; + color: #333333; text-decoration: none; &:hover { @@ -96,6 +97,7 @@ li i { // Layout of icons padding-right: 0.5em; + -webkit-text-stroke: 1px whitesmoke; &:before{ display: inline-block; text-align: center; @@ -112,7 +114,7 @@ .td-sidebar { @include media-breakpoint-up(md) { - padding-top: 4rem; + padding-top: 6rem; background-color: $td-sidebar-bg-color; padding-right: 1rem; border-right: 1px solid $td-sidebar-border-color; @@ -170,4 +172,15 @@ li .indent { font-size: 0.833rem; padding-left: 0.75rem; +} + +@media (min-width: 768px) { + .header-only > span > span.emptynode:hover { + color: #aaa; + } + + .ul-2 > li:not(:last-child) { + padding-bottom: 8px; + border-bottom: 1px solid #ccc; + } } \ No newline at end of file diff --git a/assets/scss/_styles_project.scss b/assets/scss/_styles_project.scss index 54b8d8ca7b..0591411731 100644 --- a/assets/scss/_styles_project.scss +++ b/assets/scss/_styles_project.scss @@ -18,7 +18,7 @@ a.footnote-ref::after { /* START Adjust Heading sizes*/ .td-navbar { - background: black !important; + background-color: white !important; } /* This ensures there is no padding added to the top logo bar */ @@ -35,15 +35,28 @@ a.footnote-ref::after { font-size: 0.833rem; line-height: 1.667em; letter-spacing: 0.1875rem; - padding: 5px 10px; + padding: 5px 8px; text-decoration: none; text-transform: uppercase; white-space: nowrap; a { - color: white; + color: black; } } +.td-navbar 
.navbar-brand span:last-child { + vertical-align: middle; +} + +.navbar-dark .navbar-brand, .navbar-dark .navbar-brand:focus, .navbar-dark .navbar-brand:hover { + color: black; +} + +.td-navbar .navbar-brand svg { + height: 16px; + margin: 0; +} + .td-navbar .nav-link { font-family: Space Mono, sans-serif; font-weight: 400; @@ -55,16 +68,17 @@ a.footnote-ref::after { } h1, .h1 { - font-family: Space Mono, sans-serif; - font-size: 2.667rem; // 32pt + font-family: Space grotesk, sans-serif; + font-size: 2.625rem; // 32pt margin-left: 0px; + font-weight: 500; } h2, .h2 { - font-family: space grotesk, sans-serif; - font-size: 2.0rem !important; // 24pt - line-height: 2.667rem; - font-weight: 600; + font-family: Public sans, sans-serif; + font-size: 1.25rem !important; // 20px + line-height: 1.25rem; + font-weight: 500; clear: both; } @@ -78,26 +92,26 @@ h2, .h2 { h3, .h3 { font-family: space grotesk, sans-serif; - font-size: 1.6667rem !important; // 20pt + font-size: 1.1667rem !important; // 20pt font-weight: 400; clear: both; } h4, .h4 { font-family: space grotesk, sans-serif; - font-size: 1.3333rem !important; // 16pt + font-size: 1.0rem !important; // 16pt line-height: 1.4375em; clear: both; } h5, .h5 { font-family: space grotesk, sans-serif; - font-size: 1.1667rem !important; // 14pt + font-size: 0.933rem !important; // 14pt clear: both; } h6, .h6 { - font-size: 1.0rem !important; + font-size: 0.933rem !important; line-height: 1.0625rem; // 12pt clear: both; } @@ -157,7 +171,6 @@ h7, .h7 { .td-toc ul li a { font-size: 0.833rem; - font-weight: 500; color: #333333; text-decoration: none; } @@ -213,6 +226,14 @@ a > code { color: #333333 !important; } +.nav-fold * #TableOfContents ul li ul { + padding: 0; +} + +.nav-fold * #TableOfContents ul li ul.is-collapsed { + display: none; +} + p, li { color: #515151; a { @@ -232,17 +253,25 @@ p, li { // START FOOTER STYLING +footer { + border-top: 1px solid rgba(0, 0, 0, 0.1); +} + footer a { color: #fff !important; } /* sets 
the footer to black. */ .bg-dark { - background-color: black !important; + background-color: white !important; } footer * small { - color: white; + color: black; +} + +.text-white, footer a { + color: black !important; } /* START no whitespace beneath footer */ @@ -261,9 +290,10 @@ footer small { /* sets the base font heavier than the theme. */ .td-content p, .td-content li, .td-content td { + font-family: "Public Sans light", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; font-weight: normal; - font-size: 1rem !important; - line-height: 1.5em !important; + font-size: 0.938rem !important; + line-height: 1.625rem !important; } /* @@ -338,6 +368,7 @@ END MARKETING CSS color: black; border: 1px solid black; margin-right: 5px; + padding: 1rem 1.25rem; @each $color, $value in $theme-colors { &-#{$color} { @@ -351,8 +382,12 @@ END MARKETING CSS } } +.alert > ul:last-child, .alert > p:last-child { + margin-bottom: 0 !important; +} + .td-content .alert:not(:first-child) { - margin-top: 1.5rem; + margin-top: 37px; margin-bottom: 1.5rem; clear: both; } @@ -437,7 +472,7 @@ END MARKETING CSS .expand-label { background-color: whitesmoke; - border: black solid 1px; + border: #d9d9d9 solid 1px; padding: 0.5rem 1rem 0.5rem 1rem; i { font-size: inherit !important; @@ -449,20 +484,25 @@ END MARKETING CSS .expand-label span h5 { display: inline; font-family: Public Sans,system-ui; + font-size: 0.938rem !important; +} + +.tablestep * .expand-label, .tablestep * .expand-content { + background-color: white !important; } .expand-content { - border-top: black solid 0px; - border-left: black solid 1px; - border-right: black solid 1px; - border-bottom: black solid 1px; + border-top: #d9d9d9 solid 0px; + border-left: #d9d9d9 solid 1px; + border-right: #d9d9d9 solid 1px; + border-bottom: #d9d9d9 solid 1px; background: whitesmoke; padding: 1rem; overflow: hidden; } .expand { - padding-bottom: 
1rem; + padding-bottom: 0.75rem; clear: both; } @@ -481,10 +521,15 @@ END MARKETING CSS details * i.fa-chevron-down { display: none; } + details * i.fa-chevron-right { display: unset; } +details * i { + -webkit-text-stroke: 1px whitesmoke; +} + details[open] * i.fa-chevron-down { display: unset; } @@ -527,6 +572,18 @@ details summary::-webkit-details-marker { background-color: #eaf9fb; } +.expand.howtoexpand.light summary, .expand.howtoexpand.light .expand-content { + background-color: rgba(200, 249, 183, 1.0) !important; +} + +.expand.howtoexpand.middle summary, .expand.howtoexpand.middle .expand-content { + background-color: rgba(189, 234, 196, 1.0) !important; +} + +.expand.howtoexpand.dark summary, .expand.howtoexpand.dark .expand-content { + background-color: rgba(163, 203, 178, 1.0) !important; +} + .howtoexpand .expand-label { padding-top: 1rem; padding-bottom: 0; @@ -613,11 +670,12 @@ details summary::-webkit-details-marker { // Hover animations and colors .hover-card > a, .hover-card > .hovercardcontainer { - border: 1px solid #000; + border: 1px solid #dee2e6; border-radius: 2px; padding: 1rem; margin: 0px; transition: all .2s; + width: 100%; } .hover-card.link-with-icon { @@ -632,7 +690,7 @@ details summary::-webkit-details-marker { -webkit-transform: translate(-3px,-3px); -ms-transform: translate(-3px,-3px); transform: translate(-3px,-3px); - box-shadow: 5px 5px 0 0 #000; + box-shadow: 5px 5px 0 0 #dee2e6; } .hover-card.yellow:hover > a { @@ -1030,7 +1088,7 @@ td > ul, td > ol { margin-right: -1px; border-bottom: 1px solid rgb(209, 213, 219); border-top: 1px solid rgb(209, 213, 219); - padding: 0.25rem 0.75rem; + padding: 0.5rem 1.5rem; color: rgb(78, 79, 82); background-color: rgb(241, 241, 244); } @@ -1402,6 +1460,16 @@ ul.sectionlist { padding: 0; } +ul.sectionlist.horizontal { + display: flex; + gap: 50px; +} + +ul.sectionlist.horizontal > li { + padding-left: 0.5rem; + padding-right: 0.5rem; +} + ul.sectionlist > li { list-style-type: none; } @@ 
-2125,6 +2193,23 @@ div .filterable > p { overflow: auto; } +// Fix tables in steps and expanders + +.tablestep > table, .expand-content > table { + min-width: 100% !important; +} + +.tablestep > table td, .tablestep > table th, .expand-content > table td, .expand-content > table th { + padding: 0.75rem; + vertical-align: top; + border-top: 1px solid #dee2e6; +} + +.tablestep * tbody > tr:nth-of-type(odd) { + background-color: rgba(0, 0, 0, 0.05) !important; +} + + .usecase-table .tablestep:nth-of-type(odd) { background-color: whitesmoke; border-top: 1px solid #d7d7d9; @@ -2556,3 +2641,329 @@ div.explanation > .explanationvisual > .gif { opacity: 0; transition: opacity 250ms ease-out; } + +// Top level nav START +nav { + display: flex; + flex-direction: column; +} + +.navbar { + padding: 0; +} + +.navcontainer { + min-width: 100%; + display: flex; + padding: 0.25rem 1rem; + min-height: 50px; + background-color: white; +} + +.td-topbar-sections { + margin-right: auto !important; + margin-left: 0 !important; + line-height: unset !important; +} + +.td-topbar-sections > ul { + padding: 0; + padding-left: 1.25rem; + margin: 0.25rem 0 0 0; + border-left: 1px solid black; +} + +.td-topbar-sections > ul > li { + padding-right: 1.5rem; + display: inline-block; + text-transform: uppercase; + font-family: Roboto Mono Variable,Roboto Mono,ui-monospace,monospace; + font-size: .875rem; + line-height: 1.25rem; +} + +.td-topbar-sections > ul > li > a { + color: black; + padding-bottom: 8px; + padding-top: 8px; +} + +.td-topbar-sections > ul > li > a:hover { + text-decoration: none; +} + +.second-nav { + background-color: rgb(239,239,242); + min-width: 100%; + padding: 0.25rem 1rem; +} + +.second-nav > ul { + padding: 0; + padding-left: 0.25rem; + margin: 0.25rem 0 0 0; +} + +.second-nav > ul > li { + padding-right: 3rem; + display: inline-block; + text-transform: uppercase; + font-family: Roboto Mono Variable,Roboto Mono,ui-monospace,monospace; + font-size: .875rem; + line-height: 
1.25rem; +} + +.second-nav > ul > li > a { + color: #333; + padding-bottom: 8px; + padding-top: 8px; +} + +.second-nav > ul > li > a:hover { + text-decoration: none; +} + +.second-nav * .active-path { + font-weight: 600; +} + +@media (min-width: 768px) { + .td-main main { + padding-top: 7.5rem; + } +} + +.td-navbar { + min-height: 3.5rem; +} + +@media (max-width: 768px) { + .second-nav { + display: none; + } +} + +.navsectiontop.active-path { + border-bottom: 1px solid black; +} + +#navsearch { + min-width: 250.633px; +} + +span.section-overview { + display: none +} + +.navsectiontop { + display: none; +} + +@media (min-width: 768px) { + .td-sidebar-nav__section ul.ul-1 { + padding-top: 0.5rem + } + + .ul-1 > li.nav-fold.hide-if-desktop { + display: none; + } + + .navsectiontop { + display: unset; + } + + .menu-toggle.open-on-desktop { + display: none; + } + + li.nav-fold.open-on-desktop > span > ul > li { + display: block !important; + } + + li.nav-fold.header-only > span > span { + text-transform: uppercase; + font-family: Space Mono,sans-serif; + color: #aaa; + margin-top: 1rem; + } + + li.nav-fold.open-on-desktop.header-only:first-child > span > a { + margin-top: 0.5rem; + } + + li.nav-fold.open-on-desktop.header-only * li { + text-transform: unset; + } + + li.open-on-desktop > span > ul.ul-2 { + padding-left: 0; + } + + li.open-on-desktop.header-only > span > ul.ul-3 { + padding-left: 0; + } + + span.section-overview-title { + display: none + } + + span.section-overview { + display: block + } + +} + +a.tree-root { + display: none; +} + +// Top level nav END + +// Next page START + +#next-page { + clear: both; +} + +#next-page > div { + flex-direction: row; +} + +#next-page > div.right-only { + flex-direction: row-reverse; +} + +#next-page > div > div.hover-card { + min-width: 250px; + max-width: unset; +} + +@media (min-width: 500px) { + #next-page > div > div.hover-card { + max-width: calc((100% - 1.5rem)/2); + } +} + +#next-page > div > div.hover-card.left { 
+ float: left; + margin: 0rem 0.75rem 0rem 0rem; +} + +#next-page > div > div.hover-card.right { + float: right; + margin: 0rem 0rem 0rem 0.75rem; +} + +#next-page > div > div.hover-card > a { + width: 100%; + color: black; + display: flex; + flex-direction: row; + justify-content: space-between; +} + +#next-page > div > div.hover-card > a > div > p { + font-family: space grotesk,sans-serif; + font-size: 1.1667rem !important; + line-height: 1.4375em; +} + +#next-page > div > div.hover-card > a > div { + display: flex; + align-items: center; +} + +#next-page > div > div.hover-card > a > div.text { + flex-direction: column; +} + +#next-page > div > div.hover-card > a > div.text > p:first-child { + text-transform: uppercase; + font-family: Space Mono,sans-serif; + color: #aaa; + font-size: 0.833rem !important; +} + +#next-page > div > div.hover-card.left > a > div.text > p:first-child { + align-self: flex-end; +} + +#next-page > div > div.hover-card.right > a > div.text > p:first-child { + text-transform: uppercase; + align-self: flex-start; +} + + +#next-page > div > div.hover-card > a > div > div { + margin-bottom: 0; +} + +#next-page > div > div.hover-card.left > a > div.arrow { + margin-right: 1.5rem; +} + +#next-page > div > div.hover-card.left > a > div { + text-align: right; +} + +#next-page > div > div.hover-card.right > a > div.arrow { + margin-left: 1.5rem; +} + +#next-page > div > div.hover-card.right > a > div { + text-align: left; +} + + +// Next page END + +// Platform landing pages START + +#fleet-platform-management, #fleet-platform-team, #fleet-platform-monitor, #data-platform-capture, #data-platform-work, #data-platform-ai, +#build-platform-connect, #build-platform-apps, #build-platform-motion, +#build-platform, #data-platform, #fleet-platform { + display: none; +} + +.hoverable-fleet:hover ~ #fleet-platform-all, .hoverable-team:hover ~ #fleet-platform-all, .hoverable-monitor:hover ~ #fleet-platform-all, +.hoverable-capture:hover ~ 
#data-platform-all, .hoverable-work:hover ~ #data-platform-all, .hoverable-ai:hover ~ #data-platform-all, +.hoverable-connect:hover ~ #build-platform-all, .hoverable-apps:hover ~ #build-platform-all, .hoverable-motion:hover ~ #build-platform-all, +.hoverable-build:hover ~ #platform-all, .hoverable-data:hover ~ #platform-all, .hoverable-fleet:hover ~ #platform-all { + display: none; +} + +.hoverable-fleet:hover ~ #fleet-platform-management, .hoverable-team:hover ~ #fleet-platform-team, .hoverable-monitor:hover ~ #fleet-platform-monitor, .hoverable-capture:hover ~ #data-platform-capture, .hoverable-work:hover ~ #data-platform-work, .hoverable-ai:hover ~ #data-platform-ai, +.hoverable-connect:hover ~ #build-platform-connect, .hoverable-apps:hover ~ #build-platform-apps, .hoverable-motion:hover ~ #build-platform-motion, +.hoverable-build:hover ~ #build-platform, .hoverable-data:hover ~ #data-platform, .hoverable-fleet:hover ~ #fleet-platform { + display: block !important; +} + +.upside-down { + display: flex; + flex-direction: column-reverse; +} + +.upside-down.max-page { + justify-self: center; +} + +.col-12.col-md-12.col-xl-12.pl-md-12 { + display: flex; + flex-direction: column; +} + +#rss-feed { + align-self: flex-end; +} + +.upside-down > .col.hover-card { + min-width: unset; + max-width: unset; +} + +.col-12.col-md-12.col-xl-12.pl-md-12 > .td-content { + display: flex; + justify-content: center; +} + +// Platform landing pages END diff --git a/assets/scss/_variables_project.scss b/assets/scss/_variables_project.scss index 26bb974419..281f7db75c 100644 --- a/assets/scss/_variables_project.scss +++ b/assets/scss/_variables_project.scss @@ -119,7 +119,7 @@ BEGIN CSS CHANGES FOR MARKETING @font-face { font-family: 'Space Grotesk'; font-style: normal; - font-weight: 700; + font-weight: 600; font-display: swap; src: url(https://fonts.gstatic.com/s/spacegrotesk/v13/V8mDoQDjQSkFtoMM3T6r8E7mPb94C-s0.woff2) format('woff2'); unicode-range: U+0100-024F, U+0259, U+1E00-1EFF, 
U+2020, U+20A0-20AB, U+20AD-20CF, U+2113, U+2C60-2C7F, U+A720-A7FF; diff --git a/assets/services/data/time-series.png b/assets/services/data/time-series.png new file mode 100644 index 0000000000..506189e99a Binary files /dev/null and b/assets/services/data/time-series.png differ diff --git a/docs/_index.md b/docs/_index.md index 3f400b11b2..b76fa8482a 100644 --- a/docs/_index.md +++ b/docs/_index.md @@ -5,6 +5,7 @@ description: "Viam integrates with hardware and software on any device. Use AI, weight: 1 no_list: true type: "docs" +layout: "landing" noToc: true hide_feedback: true sitemap: @@ -16,919 +17,40 @@ images: ["/general/understand.png"] noedit: true date: "2024-09-17" updated: "2024-10-11" +aliases: + - "/getting-started/" + - "/getting-started/high-level-overview" + - "/product-overviews/" + - "/viam/" + - "/viam/app.viam.com/" + - "/get-started/" + - "/platform/" --- -
-
-
-

Viam Documentation

-

- Viam integrates with hardware and software on any device. Use AI, machine learning, and more to make any machine smarter—for one machine to thousands. -

- -
- Robot illustration -
-
-
- -
- -## Program any device - -To get started, install Viam on any device and create a configuration that describes connected hardware as {{< glossary_tooltip term_id="component" text="components" >}}. Then you can control your device and any attached physical hardware securely **from anywhere in the world**. Or from local networks. - -{{< tabs class="horizontalheaders program" navheader="Examples">}} -{{% tab name="Drive a base" %}} - -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -async def moveInSquare(base): - for _ in range(4): - # Move forward 500mm at 500mm/s - await base.move_straight(velocity=500, distance=500) - # Spin 90 degrees at 100 degrees/s - await base.spin(velocity=100, angle=90) -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -func moveInSquare(ctx context.Context, base base.Base, logger logging.Logger) { - for i := 0; i < 4; i++ { - // Move forward 500mm at 500mm/s - base.MoveStraight(ctx, 600, 500.0, nil) - // Spin 90 degrees at 100 degrees/s - base.Spin(ctx, 90, 100.0, nil) - } -} -``` - -{{% /tab %}} -{{% tab name="TypeScript" %}} - -```ts -async function moveInSquare(baseClient: VIAM.BaseClient) { - for (let i = 0; i < 4; i++) { - // Move forward 500mm at 500mm/s - await baseClient.moveStraight(500, 500); - // Spin 90 degrees at 100 degrees/s - await baseClient.spin(90, 100); - } -} -``` - -{{% /tab %}} -{{% tab name="Flutter" %}} - -```dart -Future moveSquare() async { - for (var i=0; i<4; i++) { - // Move forward 500mm at 500mm/s - await base.moveStraight(500, 500); - // Spins the rover 90 degrees at 100 degrees/s - await base.spin(90, 100); - } -} -``` - -{{% /tab %}} -{{% tab name="C++" %}} - -```cpp -void move_in_square(std::shared_ptr base) { - for (int i = 0; i < 4; ++i) { - // Move forward 500mm at 500mm/s - base->move_straight(500, 500); - // Spins the rover 90 degrees at 100 degrees/s - base->spin(90, 100); - } -} -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -You can use any robotic base with Viam. Configure it as a base component. Then you can drive it using the base API. - -[Drive a base →](/how-tos/drive-rover/) - -
-
- -{{}} - -
-
-
- -{{% /tab %}} -{{% tab name="Control motor" %}} - -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -async def spin_motor(motor): - # Turn the motor at 35% power forwards - await motor.set_power(power=0.35) - # Let the motor spin for 3 seconds - time.sleep(3) - # Stop the motor - await motor.stop() -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -func spinMotor(ctx context.Context, motor motor.Motor, logger logging.Logger) { - // Turn the motor at 35% power forwards - err = motor.SetPower(context.Background(), 0.35, nil) - // Let the motor spin for 3 seconds - time.Sleep(3 * time.Second) - // Stop the motor - err = motor.Stop(context.Background(), nil) -} -``` - -{{% /tab %}} -{{% tab name="TypeScript" %}} - -```ts -async function spinMotor(motorClient: VIAM.MotorClient) { - // Turn the motor at 35% power forwards - await motorClient.setPower(0.35); - // Let the motor spin for 3 seconds - const sleep = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); - await sleep(3000); - // Stop the motor - await motorClient.stop(); -} -``` - -{{% /tab %}} -{{% tab name="Flutter" %}} - -```dart -Future spinMotor() async { - // Turn the motor at 35% power forwards - await motorClient.setPower(0.35); - // Let the motor spin for 3 seconds - await Future.delayed(Duration(seconds: 3)); - // Stop the motor - await motorClient.stop(); -} -``` - -{{% /tab %}} -{{% tab name="C++" %}} - -```cpp -void spin_motor(std::shared_ptr motor) { - // Turn the motor at 35% power forwards - motor->set_power(0.35); - // Let the motor spin for 3 seconds - sleep(3); - // Stop the motor - motor->stop(); -} -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -You can use any motor with Viam. Configure it as a motor component. Then you can operate it using the motor API. - -[Control a motor →](/how-tos/control-motor/) - -
-
- -{{}} - -
-
-
-{{% /tab %}} -{{% tab name="Get sensor reading" %}} -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -# Get the readings provided by the sensor. -co_2_monitor = Sensor.from_robot(machine, "co2-monitor") -co_2_monitor_return_value = await co_2_monitor.get_readings() -print(f"co2-monitor get_readings return value: {co_2_monitor_return_value}") -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -// Get the readings provided by the sensor. -co2Monitor, err := sensor.FromRobot(machine, "co2-monitor") -co2MonitorReturnValue, err := co2Monitor.Readings( - context.Background(), map[string]interface{}{}) -logger.Infof("co2-monitor return value: %+v", co2MonitorReturnValue) -``` - -{{% /tab %}} -{{% tab name="TypeScript" %}} - -```ts -// Get the readings provided by the sensor. -const co2MonitorClient = new VIAM.SensorClient(machine, "co2-monitor"); -const co2MonitorReturnValue = await co2MonitorClient.getReadings(); -console.log("co2-monitor return value:", co2MonitorReturnValue); -``` - -{{% /tab %}} -{{% tab name="Flutter" %}} - -```dart -// Get the readings provided by the sensor. -final co2Monitor = Sensor.fromRobot(client, "co2-monitor"); -var readings = await co2Monitor.readings(); -print(readings); -``` - -{{% /tab %}} -{{% tab name="C++" %}} - -```cpp -// Get the readings provided by the sensor. -auto co2monitor = machine->resource_by_name("co2-monitor"); -auto co2monitor_get_readings_return_value = co2monitor->get_readings(); -std::cout << "co2-monitor get_readings return value " << co2monitor_get_readings_return_value << "\n"; -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -You can use any physical sensor or anything else that provides measurements with Viam. Configure it as a sensor component. Then you can get sensor readings using the sensor API. - -[Collect sensor data →](/how-tos/collect-sensor-data/) - -
-
-
-{{% /tab %}} -{{% tab name="Move an arm" %}} -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -# Command a joint position move: move the forearm of the arm slightly up -cmd_joint_positions = JointPositions(values=[0, 0, -30.0, 0, 0, 0]) -await my_arm_component.move_to_joint_positions( - positions=cmd_joint_positions) - -# Generate a simple pose move +100mm in the +Z direction of the arm -cmd_arm_pose = await my_arm_component.get_end_position() -cmd_arm_pose.z += 100.0 -await my_arm_component.move_to_position(pose=cmd_arm_pose) -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -// Command a joint position move: move the forearm of the arm slightly up -cmdJointPositions := &armapi.JointPositions{Values: []float64{0.0, 0.0, -30.0, 0.0, 0.0, 0.0}} -err = myArmComponent.MoveToJointPositions(context.Background(), cmdJointPositions, nil) - -// Generate a simple pose move +100mm in the +Z direction of the arm -currentArmPose, err := myArmComponent.EndPosition(context.Background(), nil) -adjustedArmPoint := currentArmPose.Point() -adjustedArmPoint.Z += 100.0 -cmdArmPose := spatialmath.NewPose(adjustedArmPoint, currentArmPose.Orientation()) - -err = myArmComponent.MoveToPosition(context.Background(), cmdArmPose, nil) -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -You can use any robotic arm with Viam. -Configure it as an arm component. Then you can move it using the arm API. - -[Move a robotic arm →](/how-tos/move-robot-arm/) - -
-
- -{{}} - -
-
-
-{{% /tab %}} -{{% tab name="Operate custom hardware" %}} -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -my_button = Generic.from_robot(robot=machine, name="my_button") - -# Use a custom command to push the button 5 -command = {"cmd": "push_button", "button": 5} -result = await my_button.do_command(command) -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -myButton, err := generic.FromRobot(machine, "my_button") - -// Use a custom command to push the button 5 -command := map[string]interface{}{"cmd": "push_button", "button": 5} -result, err := myButton.DoCommand(context.Background(), command) -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Using the Viam Registry you can create _{{< glossary_tooltip term_id="resource" text="resources" >}}_ for additional hardware types or models and then deploy them to your machines. -You can use an existing component or service type or create generic resources. - -[Create a module →](/how-tos/hello-world-module/) - -
-
-
-{{% /tab %}} -{{< /tabs >}} - -
-
-
- -## Make your devices better and smarter - -

- Pick and choose from additional services. Make your devices understand their environment, interact with it, collect data, and more: -

-
- -{{< tabs class="horizontalheaders services" navheader="Services">}} -{{% tab name="Computer Vision" %}} - -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -# Get image from camera stream on construction site -cam = Camera.from_robot(machine, "construction-site-cam") -img = await cam.get_image() - -# Use machine learning model to gather information from the image -hardhat_detector = VisionClient.from_robot(machine, "hardhat_detector") -detections = await hardhat_detector.get_detections(img) - -# Check whether a person is detected not wearing a hardhat -for d in detections: - if d.confidence > 0.8 and d.class_name == "NO-Hardhat": - print("Violation detected.") -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -// Get image from camera stream on construction site -myCamera, err := camera.FromRobot(machine, "construction-site-cam") -camStream, err := myCamera.Stream(context.Background()) -img, release, err := camStream.Next(context.Background()) -defer release() - -// Use machine learning model to gather information from the image -visService, err := vision.FromRobot(machine, "hardhat_detector") -detections, err := visService.Detections(context.Background(), img, nil) - -// Check whether a person is detected not wearing a hardhat -for i := 0; i < len(detections); i++ { - if (detection[i].confidence > 0.8) && (detection[i].class_name == "NO-Hardhat") { - logger.Info("Violation detected.") - } -} -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Computer vision enables your machine to use connected cameras to interpret the world around it. -With inferences about a machine's surroundings, you can program machines to act based on this input. - -[Try the vision service →](/tutorials/projects/helmet/) - -
-
- -{{}} - -
-
-
- -{{% /tab %}} -{{% tab name="Data Management" %}} - -
- -{{< tabs >}} -{{% tab name="Captured Data" %}} - -{{}} - -{{% /tab %}} -{{% tab name="Query Data" %}} - -```python -# Tag data from the my_camera component -my_filter = create_filter(component_name="my_camera") -tags = ["frontview", "trainingdata"] -res = await data_client.add_tags_to_binary_data_by_filter(tags, my_filter) - -# Query sensor data by filter -my_data = [] -my_filter = create_filter( - component_name="sensor-1", - start_time=Timestamp('2024-10-01 10:00:00', tz='US/Pacific'), - end_time=Timestamp('2024-10-12 18:00:00', tz='US/Pacific') -) -tabular_data, count, last = await data_client.tabular_data_by_filter( - my_filter, last=None) -``` + +
-{{% /tab %}} -{{< /tabs >}} - -
-
- -Sync sensor data, images, and any other binary or timeseries data from all your machines to the cloud. There, you can query and visualize it. - -Intermittent internet connectivity? Your data will sync whenever internet is available. - -[Learn about Data Management →](/services/data/) - -
-
-
-{{% /tab %}} -{{% tab name="Motion" %}} -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -# Add a table obstacle to a WorldState -table_origin = Pose(x=-202.5, y=-546.5, z=-19.0) -table_dimensions = Vector3(x=635.0, y=1271.0, z=38.0) -table_object = Geometry(center=table_origin, - box=RectangularPrism(dims_mm=table_dimensions)) -obstacles_in_frame = GeometriesInFrame(reference_frame="world", - geometries=[table_object]) -world_state = WorldState(obstacles=[obstacles_in_frame]) - -# Destination pose to move to -dest_in_frame = PoseInFrame( - reference_frame="world", - pose=Pose(x=510.0, y=0.0, z=526.0, o_x=0.7, o_y=0.0, o_z=-0.7, theta=0.0)) - -# Move arm to destination pose -motion_service = MotionClient.from_robot(robot, "builtin") -await motion_service.move( - component_name=Arm.get_resource_name("myArm"), - destination=dest_in_frame, world_state=world_state) -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -// Add a table obstacle to a WorldState -obstacles := make([]spatialmath.Geometry, 0) -tableOrigin := spatialmath.NewPose( - r3.Vector{X: 0.0, Y: 0.0, Z: -10.0}, - &spatialmath.OrientationVectorDegrees{OX: 0.0, OY: 0.0, OZ: 1.0, Theta: 0.0}, -) -tableDimensions := r3.Vector{X: 2000.0, Y: 2000.0, Z: 20.0} -tableObj, err := spatialmath.NewBox(tableOrigin, tableDimensions, "table") -obstacles = append(obstacles, tableObj) -obstaclesInFrame := referenceframe.NewGeometriesInFrame(referenceframe.World, obstacles) -worldState, err := referenceframe.NewWorldState([]*referenceframe.GeometriesInFrame{obstaclesInFrame}, nil) - -// Destination pose to move to -destinationPose := spatialmath.NewPose( - r3.Vector{X: 510.0, Y: 0.0, Z: 526.0}, - &spatialmath.OrientationVectorDegrees{OX: 0.7071, OY: 0.0, OZ: -0.7071, Theta: 0.0}, -) -destPoseInFrame := referenceframe.NewPoseInFrame( - referenceframe.World, destinationPose) - -// Move arm to destination pose -motionService, err := motion.FromRobot(robot, "builtin") -_, err = motionService.Move(context.Background(), arm.Named("myArm"), destPoseInFrame, 
worldState, nil, nil) -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -The motion service enables your machine to plan and move relative to itself, other machines, and the world. - -[Try the motion service →](/tutorials/services/plan-motion-with-arm-gripper/) - -
-
- -{{}} - -
-
-
-{{% /tab %}} -{{% tab name="Navigation" %}} -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -my_nav = NavigationClient.from_robot(robot=robot, name="my_nav_service") - -# Create a new waypoint at the specified latitude and longitude -location = GeoPoint(latitude=40.76275, longitude=-73.96) - -# Add waypoint to the service's data storage -await my_nav.add_waypoint(point=location) - -my_nav = NavigationClient.from_robot(robot=robot, name="my_nav_service") - -# Set the service to operate in waypoint mode and begin navigation -await my_nav.set_mode(Mode.ValueType.MODE_WAYPOINT) -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -myNav, err := navigation.FromRobot(robot, "my_nav_service") - -// Create a new waypoint at the specified latitude and longitude -location = geo.NewPoint(40.76275, -73.96) - -// Add waypoint to the service's data storage -err := myNav.AddWaypoint(context.Background(), location, nil) - -myNav, err := navigation.FromRobot(robot, "my_nav_service") - -// Set the service to operate in waypoint mode and begin navigation -mode, err := myNav.SetMode(context.Background(), Mode.MODE_WAYPOINT, nil) -``` - -{{% /tab %}} -{{% tab name="Viam app" %}} - -{{< imgproc src="/services/navigation/navigation-control-card.png" alt="An example control interface for a navigation service in the Viam app Control Tab." resize="1200x" class="imgzoom aligncenter" >}} - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Use the navigation service to autonomously navigate a machine to defined waypoints. - -[Try the navigation service →](/tutorials/services/navigate-with-rover-base/) - -
-
-
-{{% /tab %}} -{{% tab name="Custom Logic" %}} -
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -my_twilio_svc = Generic.from_robot(robot=machine, name="my_twilio_svc") - -# Use a custom command to send a text message with Twilio -command = {"to": "+1 234 567 8901", "body": "Hello world!"} -result = await my_twilio_svc.do_command(command) -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -myTwilioSvc, err := generic.FromRobot(machine, "my_twilio_svc") - -// Use a custom command to send a text message with Twilio -command := map[string]interface{}{"to": "+1 234 567 8901", "body": "Hello world!"} -result, err := myTwilioSvc.DoCommand(context.Background(), command) -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Using the Viam Registry you can turn services and your own custom business logic into _{{< glossary_tooltip term_id="module" text="modules" >}}_. You can then deploy your modules to your machines. - -[Create a module →](/how-tos/create-module/) - -
-
-
-{{% /tab %}} -{{< /tabs >}} - -
-
-
- -## Go from one machine to thousands - -

- When you connect machines to the cloud you get fleet management tools that let you scale. Go from one prototype to thousands of machines you can manage and operate from one place using the Viam Cloud. -

+ -{{< tabs class="horizontalheaders platform" navheader="Capabilities">}} -{{% tab name="Deployment" %}} - -
- -{{< tabs >}} -{{% tab name="Fragment" %}} - -```json -// Reusable configuration for using a software package -{ - "services": [ - { - "name": "speech-1", - "namespace": "viam-labs", - "type": "speech", - "model": "viam-labs:speech:speechio" - } - ], - "modules": [ - { - "type": "registry", - "name": "viam-labs_speech", - "module_id": "viam-labs:speech", - // Specific version to deploy - "version": "0.5.2" - } - ] -} -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Manage hardware and software for multiple machines using a built-in tool called _{{< glossary_tooltip term_id="fragment" text="fragments" >}}_. -You can make changes to some or all of your machines in one go. - -[Deploy packages across devices →](/how-tos/deploy-packages/) - -
+ -
- -{{% /tab %}} -{{% tab name="Provisioning" %}} - -
- -{{< tabs >}} -{{% tab name="Shell" %}} - -```sh {class="command-line" data-prompt="$" data-output="3-5,6,7"} -# Create configuration for provisioning machines with a fragment -echo "{ - "manufacturer": "Company", - "model": "SmartRover", - "fragment_id": "11d1059b-eaed-4ad8-9fd8-d60ad7386aa2" -}" >> viam-provisioning.json - -# Get and run the script to install viam on a board. -wget https://storage.googleapis.com/packages.viam.com/apps/viam-agent/preinstall.sh -chmod 755 preinstall.sh -sudo ./preinstall.sh -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
-Provisioning allows you to complete part of the machine setup during the manufacturing process. The rest of the first-time setup happens once the machine is taken into operation. -This way, machines automatically get the latest updates. - -[Learn about provisioning →](/fleet/provision/) - -
+ -
- -{{% /tab %}} -{{% tab name="Observability" %}} - -
- -{{< tabs >}} -{{% tab name="Viam app" %}} - -{{< imgproc src="/fleet/dashboard.png" alt="Dashboard view of machine status information" resize="1200x" class="imgzoom aligncenter" >}} - -{{% /tab %}} -{{% tab name="Python" %}} -```python -# Get all machines in a location -machines = await cloud.list_robots(location_id="abcde1fghi") +Platform diagram with build elements highlighted +Platform diagram with connect elements highlighted +Platform diagram with apps element highlighted +Platform diagram with motion elements highlighted -for m in machines: - # Connect and get status information or latest logs - machine_parts = await cloud.get_robot_parts(m.id) - main_part = next(filter(lambda part: part.main_part, machine_parts), None) - - try: - # Get status for machine - machine = await connect(main_part.fqdn) - status = await machine.get_machine_status() - except ConnectionError: - # If no connection can be made, get last logs - logs = await cloud.get_robot_part_logs( - robot_part_id=main_part.id, num_log_entries=5) -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Get status information and logs from all your deployed machines. - -[Learn about Platform APIs →](/appendix/apis/#platform-apis) - -
-
- -{{% /tab %}} -{{% tab name="ML Training" %}} - -
- -{{< tabs >}} -{{% tab name="Viam app" %}} - -{{< imgproc src="/tutorials/data-management/train-model.png" alt="The data tab showing the train a model pane" resize="1200x" class="imgzoom" >}} - -{{% /tab %}} -{{% tab name="Python" %}} - -```python -# Start a training job to create a classification model based on the dataset -job_id = await ml_training_client.submit_training_job( - org_id="abbc1c1c-d2e3-5f67-ab8c-de912345f678", - dataset_id="12ab3cd4e56f7abc89de1fa2", - model_name="recognize_gestures", - model_version="1", - model_type=ModelType.MODEL_TYPE_MULTI_LABEL_CLASSIFICATION, - tags=["follow", "stop"] -) - -# Get status information for training job -job_metadata = await ml_training_client.get_training_job( - id=job_id) -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Build machine learning models based on your machines' data. You can pick from different training algorithms or create your own. - -[Train and deploy ML models →](/how-tos/train-deploy-ml/) - -
-
-
- -{{% /tab %}} -{{% tab name="Collaboration" %}} - -
- -{{< tabs >}} -{{% tab name="Viam app" %}} - -{{}} - -{{% /tab %}} -{{% tab name="Python" %}} - -```python -# Create a new machine -new_machine_id = await cloud.new_robot( - name="new-machine", location_id="abcde1fghi") - -# Get organization associated with authenticated user / API key -org_list = await cloud.list_organizations() - -# Create a new API key with owner access for the new machine -auth = APIKeyAuthorization( - role="owner", - resource_type="robot", - resource_id=new_machine_id -) -api_key, api_key_id = await cloud.create_key( - org_list[0].id, [auth], "key_for_new_machine") -``` - -{{% /tab %}} -{{< /tabs >}} - -
-
- -Viam allows you to organize and manage any number of machines. When collaborating with others, you can assign permissions using Role-Based Access Control (RBAC). - -[Learn about access control →](/cloud/rbac/) - -
-
-
- -{{% /tab %}} -{{< /tabs >}} diff --git a/docs/appendix/_index.md b/docs/appendix/_index.md deleted file mode 100644 index 37f744a50b..0000000000 --- a/docs/appendix/_index.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: "Appendix" -linkTitle: "Appendix" -weight: 900 -empty_node: true -layout: "empty" -notoc: true -type: "docs" -description: "Reference and Background Material" -canonical: "/appendix/changelog/" ---- diff --git a/docs/appendix/apis/_index.md b/docs/appendix/apis/_index.md deleted file mode 100644 index b02f0167d5..0000000000 --- a/docs/appendix/apis/_index.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -title: "Viam's Client APIs" -linkTitle: "APIs" -weight: 20 -type: "docs" -description: "Access and control your machine or fleet with the SDKs' client libraries for the resource and robot APIs." -icon: true -images: ["/services/icons/sdk.svg"] -tags: ["client", "sdk", "viam-server", "networking", "apis", "robot api"] -aliases: - - /program/sdks/ - - /program/apis/ - - /build/program/apis/ -no_list: true -date: "2024-10-01" -# updated: "" # When the content was last entirely checked ---- - -Every Viam {{< glossary_tooltip term_id="resource" text="resource" >}} exposes an [application programming interface (API)](https://en.wikipedia.org/wiki/API) described through [protocol buffers](https://developers.google.com/protocol-buffers). - -The API methods provided by the SDKs for each of these resource APIs wrap gRPC client requests to the machine when you execute your program, providing you a convenient interface for accessing information about and controlling the {{< glossary_tooltip term_id="resource" text="resources" >}} you have [configured](/configure/) on your machine. - -## Platform APIs - -{{< cards >}} -{{% manualcard link="/appendix/apis/fleet/" title="Fleet Management API" %}} - -Create and manage organizations, locations, and machines, get logs from individual machines, and manage fragments and permissions. 
- -{{% /manualcard %}} -{{% manualcard link="/appendix/apis/data-client/" title="Data Client API" %}} - -Upload, download, filter, tag or perform other tasks on data like images or sensor readings. - -{{% /manualcard %}} -{{% manualcard link="/appendix/apis/robot/" title="Machine Management API" %}} - -Manage your machines: connect to your machine, retrieve status information, and send commands remotely. - -{{% /manualcard %}} -{{% manualcard link="/appendix/apis/ml-training-client/" title="ML Training Client API" %}} - -Submit and manage ML training jobs running on the Viam app. - -{{% /manualcard %}} -{{% manualcard link="/appendix/apis/billing-client/" title="Billing Client API" %}} - -Retrieve billing information from the Viam app. - -{{% /manualcard %}} - -{{< /cards >}} - -## Component APIs - -These APIs provide interfaces for controlling and getting information from the {{< glossary_tooltip term_id="component" text="components" >}} of a machine: - -{{< cards >}} -{{< card link="/appendix/apis/components/arm/" customTitle="Arm API" noimage="True" >}} -{{< card link="/appendix/apis/components/base/" customTitle="Base API" noimage="True" >}} -{{< card link="/appendix/apis/components/board/" customTitle="Board API" noimage="True" >}} -{{< card link="/appendix/apis/components/camera/" customTitle="Camera API" noimage="True" >}} -{{< card link="/appendix/apis/components/encoder/" customTitle="Encoder API" noimage="True" >}} -{{< card link="/appendix/apis/components/gantry/" customTitle="Gantry API" noimage="True" >}} -{{< card link="/appendix/apis/components/generic/" customTitle="Generic API" noimage="True" >}} -{{< card link="/appendix/apis/components/gripper/" customTitle="Gripper API" noimage="True" >}} -{{< card link="/appendix/apis/components/input-controller/" customTitle="Input controller API" noimage="True" >}} -{{< card link="/appendix/apis/components/motor/" customTitle="Motor API" noimage="True" >}} -{{< card 
link="/appendix/apis/components/movement-sensor/" customTitle="Movement sensor API" noimage="True" >}} -{{< card link="/appendix/apis/components/power-sensor/" customTitle="Power sensor API" noimage="True" >}} -{{< card link="/appendix/apis/components/sensor/" customTitle="Sensor API" noimage="True" >}} -{{< card link="/appendix/apis/components/servo/" customTitle="Servo API" noimage="True" >}} -{{< /cards >}} - -## Service APIs - -These APIs provide interfaces for controlling and getting information from the services you configured on a machine. - -{{< cards >}} -{{% card link="/appendix/apis/services/data/" customTitle="Data management service API" noimage="True" %}} -{{% card link="/appendix/apis/services/vision/" customTitle="Vision service API" noimage="True" %}} -{{% card link="/appendix/apis/services/ml/" customTitle="ML model service API" noimage="True" %}} -{{% card link="/appendix/apis/services/motion/" customTitle="Motion service API" noimage="True" %}} -{{% card link="/appendix/apis/services/navigation/" customTitle="Navigation service API" noimage="True" %}} -{{% card link="/appendix/apis/services/generic/" customTitle="Generic service API" noimage="True" %}} -{{% card link="/appendix/apis/services/slam/" customTitle="SLAM service API" noimage="True" %}} -{{% card link="/appendix/apis/services/base-rc/" customTitle="Base Remote Control service API" noimage="True" %}} -{{< /cards >}} diff --git a/docs/appendix/apis/services/vision.md b/docs/appendix/apis/services/vision.md deleted file mode 100644 index 272129784c..0000000000 --- a/docs/appendix/apis/services/vision.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -title: "Vision service API" -linkTitle: "Vision" -weight: 20 -type: "docs" -tags: ["vision", "computer vision", "CV", "services"] -description: "Give commands to get detections, classifications, or point cloud objects, depending on the ML model the vision service is using." 
-icon: true -images: ["/services/icons/vision.svg"] -tags: ["vision", "computer vision", "CV", "services"] -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -The vision service API allows you to get detections, classifications, or point cloud objects, depending on the ML model the vision service is using. - -The [vision service](/services/vision/) supports the following methods: - -{{< readfile "/static/include/services/apis/generated/vision-table.md" >}} - -## Establish a connection - -To get started using Viam's SDKs to connect to and control your machine, go to your machine's page on the [Viam app](https://app.viam.com), navigate to the **CONNECT** tab's **Code sample** page, select your preferred programming language, and copy the sample code. - -{{% snippet "show-secret.md" %}} - -When executed, this sample code creates a connection to your machine as a client. - -The following examples assume that you have a machine configured with a [camera](/components/camera/) and a vision service [detector](/services/vision/#detections), [classifier](/services/vision/#classifications) or [segmenter](/services/vision/#segmentations). 
- -{{< tabs >}} -{{% tab name="Python" %}} - -```python -from viam.services.vision import VisionClient -``` - -{{% /tab %}} -{{% tab name="Go" %}} - -```go -import ( - "go.viam.com/rdk/services/vision" -) -``` - -{{% /tab %}} -{{< /tabs >}} - -## API - -{{< readfile "/static/include/services/apis/generated/vision.md" >}} diff --git a/docs/appendix/glossary/model-namespace-triplet.md b/docs/appendix/glossary/model-namespace-triplet.md deleted file mode 100644 index 9672f0f86a..0000000000 --- a/docs/appendix/glossary/model-namespace-triplet.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: Model Namespace Triplet -id: model-namespace-triplet -full_link: /how-tos/create-module/#name-your-new-resource-model -short_description: namespace:repo-name:name or rdk:builtin:name ---- - -{{< glossary_tooltip term_id="model" text="Models" >}} are uniquely namespaced as colon-delimited-triplets. -Modular resource model names have the form `namespace:repo-name:name`, for example `esmeraldaLabs:sensors:moisture`. -Built-in model names have the form `rdk:builtin:name`, for example `rdk:builtin:gpio`. -See [Name your new resource model](/how-tos/create-module/#name-your-new-resource-model) for more information. diff --git a/docs/appendix/glossary/part.md b/docs/appendix/glossary/part.md deleted file mode 100644 index c1b3876740..0000000000 --- a/docs/appendix/glossary/part.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -title: Part -id: part -full_link: /architecture/parts/ -short_description: A single-board computer, desktop, laptop, or other computer running viam-server, the hardware components attached to it, and any services or other resources running on it. 
---- - -Smart machines are organized into _parts_, where each part represents a computer (a [single-board computer](/installation/viam-server-setup/), desktop, laptop, or other computer) running `viam-server`, the hardware {{< glossary_tooltip term_id="component" text="components" >}} attached to it, and any {{< glossary_tooltip term_id="service" text="services" >}} or other resources running on it. - -For more information, see [Machine Architecture: Parts](/architecture/parts/). diff --git a/docs/appendix/learning-resources.md b/docs/appendix/learning-resources.md deleted file mode 100644 index 91aa005e23..0000000000 --- a/docs/appendix/learning-resources.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -title: "Learning Resources" -linkTitle: "Learning Resources" -description: "A collection of links to external sources discussing robotics topics and basic information that we believe users may find helpful." -type: "docs" -draft: true ---- - -## Overview - -The following sections contain links that we think you will find useful during your journey into robotics. - -## Basic electronics - -### Hobby servos - -Hobby servos are a type of actuator comprising a small motor with built-in closed-loop control. -They are useful for precise positioning, usually limited to a 180 degree range of angles. -Continuous rotation servos are also available that maintain a speed rather than a position. - -#### Mechanism - -Hobby servos contain a small electric motor, a series of gears, and a potentiometer attached to the shaft to act as an encoder. -It also contains a closed-loop position control circuit that takes a [Pulse Width Modulation (PWM)](https://en.wikipedia.org/wiki/Pulse-width_modulation) signal input and holds the shaft at a certain angle based on that input. - -A typical servo will take PWM pulses ranging from 1ms to 2ms long, and map this range to a 180 degree range of possible positions. 
-A 1.5ms signal will hold the servo in the middle or "neutral" position, 1ms will move it to 90 degrees from there in one direction, and 2ms will move it 90 degrees from neutral in the opposite direction. -Note that some servos have a different PWM range, mapping to a different set of angles. - -#### Hardware requirements - -Unlike [motors](/components/motor/), servos do not require a motor driver chip. - -A typical servo control setup comprises the following: - -- A Raspberry Pi (or other [board](/components/board/)) -- A servo -- An appropriate power supply - - If the servo will not be under any significant load and thus won’t draw much current, you may be able to get away with powering it off 5V (if that’s its required voltage) from the Pi pins. - However it is advisable to power it directly from a power supply that can meet its peak current needs so as not to inadvertently power cycle the Pi or other components. - -#### Wiring - -{{% alert title="Caution" color="caution" %}} -Always disconnect devices from power before plugging, unplugging or moving wires or otherwise modifying electrical circuits. -{{% /alert %}} - -Here's an example of how a servo might be wired to a Raspberry Pi: - -![A diagram showing the signal wire of a servo connected to pin 16 on a Raspberry Pi. The servo's power wires are connected to a 4.8V power supply.](/components/servo/servo-wiring.png) - -### Resistors - -[Online Resistor Color Code Calculator](https://goodcalculators.com/resistor-color-code-calculator/) - Enter the desired resistor value in Ohms, kOhms, or MOhms, and press enter and this site displays the color bands for that resistor value. - -#### Resistor value chart - -![Chart of standard colors to values for electronic components. An example resistor with green, red, and orange bands is shown. 
The value is 52 times 10 to the third power, or 52,000 Ohms.](/internals/vector/resistor.png) - -You can easily learn resistor color markings without referring to a chart by remembering this jingle: - -"Badly Burnt Resistors On Your Ground Bus Void General Warranty." - -Now, equate the jingle to the colors in this order: -Black, Brown, Red, Orange, Yellow, Green, Blue, Violet, Gray, White - -And their values on a resistor: -0, 1, 2, 3, 4, 5, 6, 7, 8, 9 - -- The bands 1 and 2 indicate the first two significant digits on a resistor. -- Band 3 is a multiplier on four-band resistors. - For example, a resistor with brown, green, orange bands representing, 1, 5, and 3, respectively, which equates to 15 times ten to the third, or 15,000 Ohms, or 15 kOhms. -- On resistors with four bands, the band 4 indicates tolerance, with gold being +/- 5% and silver being +/- 10%. -- On five-band resistors, band 3 becomes an additional significant digit, band 4 becomes the multiplier, and band 5 becomes the tolerance band. -- Six-band resistors are read identically to five-band resistors, their difference being that the sixth band indicates the resistor's temperature coefficient. - -### LEDs (light-emitting diodes) - -Light-emitting diodes come in a variety of form factors: -![Image of various Light Emitting Diode form factors.](/internals/vector/verschiedene-leds.jpg) -LEDs commonly have two leads, although specialty LEDs are available that are capable of simultaneously displaying two colors or of displaying a blended shade. These specialty LEDs have 4-6 leads and 2-4 LED junctions. - -LEDs work by applying a voltage with a positive and negative polarity to the leads in such a manner that the positive voltage is attached to the anode of the LED and the negative voltage lead is attached to the LED's cathode. On a two-pin LED, the longer pin is the anode and the short pin is the cathode. 
- -LEDs require current-limiting resistors to avoid destroying the LED junction during an over-current situation. Always include a current-limiting resistor in basic LED circuits. The following schematic illustrates this circuit: - -![This image displays a schematic showing the arrangement of a DC voltage source with the positive lead to the LED's anode, the LED's cathode connected to a one end of a current-limiting resistor and the other end of the voltage drop resistor connected to the negative lead of the voltage source, completing the circuit.](/internals/vector/led-circuit2.png) diff --git a/docs/architecture/viam-micro-server.md b/docs/architecture/viam-micro-server.md deleted file mode 100644 index fb3bb4efdf..0000000000 --- a/docs/architecture/viam-micro-server.md +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: "viam-micro-server" -linkTitle: "viam-micro-server" -weight: 90 -type: docs -images: ["/installation/thumbnails/esp32-espressif.png"] -imageAlt: "E S P 32 - espressif" -description: "Set up the Espressif ESP32 for development with `viam-micro-server`." -date: "2024-09-03" -# updated: "" # When the content was last entirely checked -# SMEs: Nicolas M., Gautham V., Andrew M. ---- - -`viam-micro-server` is the lightweight version of [`viam-server`](/architecture/viam-server/) which can run on resource-limited embedded systems (ESP32) that cannot run the fully-featured `viam-server`. -`viam-micro-server` is built from the open-source [micro-RDK](https://github.com/viamrobotics/micro-rdk/). 
- -## Hardware requirements - -{{% readfile "/static/include/micro-rdk-hardware.md" %}} - -## Support - -[Client API](/appendix/apis/) usage with the micro-RDK currently supports the following {{< glossary_tooltip term_id="resource" text="resources" >}}: - -{{< cards >}} -{{% relatedcard link="/components/base/" %}} -{{% relatedcard link="/components/board/" %}} -{{% relatedcard link="/components/camera/" %}} -{{% relatedcard link="/components/encoder/" %}} -{{% relatedcard link="/components/movement-sensor/" %}} -{{% relatedcard link="/components/motor/" %}} -{{% relatedcard link="/components/sensor/" %}} -{{% relatedcard link="/components/servo/" %}} -{{% relatedcard link="/components/generic/" %}} -{{% relatedcard link="/services/data/" %}} -{{< /cards >}} - -Click on each supported resource to see available models, API methods, and configuration info. - -## Next steps - -To use `viam-micro-server`, follow the installation guide. -If you want to access camera functionality, extend the functionality of `viam-micro-server, or customize it see the development setup guide. - -{{< cards >}} -{{% card link="/installation/viam-server-setup/" %}} -{{% card link="/installation/viam-micro-server-dev/" %}} -{{< /cards >}} diff --git a/docs/cloud/_index.md b/docs/cloud/_index.md deleted file mode 100644 index cf949ff3c5..0000000000 --- a/docs/cloud/_index.md +++ /dev/null @@ -1,102 +0,0 @@ ---- -title: "Cloud Organization Hierarchy" -linkTitle: "Cloud Organization Hierarchy" -weight: 430 -type: "docs" -description: "Configure, control, debug, and manage your machines from the cloud at app.viam.com on your own or with a team." -tags: ["fleet management", "cloud", "app"] -images: ["/fleet/fleet.svg"] -no_list: true -menuindent: true -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -Viam fleet management allows you to organize, manage, and control any number of machines alone or in collaboration with others. 
-You can manage and control your fleet of {{< glossary_tooltip term_id="machine" text="smart machines" >}} from the [Viam app](https://app.viam.com), using the [CLI](/cli/), or using the [fleet management API](/appendix/apis/fleet/). - -## Work with groups of machines - -To organize your fleet, you use: - - - -{{< cards >}} -{{% manualcard link="/cloud/organizations/" %}} - -#### Organizations - -The highest level grouping, generally used for different companies. - -{{% /manualcard %}} -{{% manualcard link="/cloud/locations/" %}} - -#### Locations - -A virtual grouping of devices with up to three levels of nesting that can represent a grouping of machines that are co-located in a building, like a factory, or a grouping of machines that are thousands of miles apart and are grouped together by function or as an organizational unit. - -An organization can have multiple locations. -{{% /manualcard %}} -{{% manualcard link="/cloud/machines/" %}} - -#### Machines - -A grouping of {{< glossary_tooltip term_id="component" text="components" >}} and {{< glossary_tooltip term_id="service" text="services" >}} across one {{< glossary_tooltip term_id="part" text="part" >}} or more parts working closely together to complete tasks. -Each machine resides in a location. - -{{% /manualcard %}} -{{< /cards >}} - -
- -{{}} - -

- -The organization structure enables you to: - -- configure groups of machines with reusable {{< glossary_tooltip term_id="fragment" text="fragments" >}} that [configure](/configure/) a set of resources for each machine that uses the fragment. -- deploy [code packages](/registry/) or [machine learning models](/services/ml/), without manually copying files by uploading it to Viam's cloud and deploying it to your fleet -- control a machine with code, the app's [**CONTROL** tab](/cloud/machines/#control), or the [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app) -- obtain health metrics, such as status, uptime, version, or [logs](machines/#logs) -- perform debugging - -All of this is possible when you are close to your machine, as well as remotely from anywhere in the world. - -## Use Viam for collaboration - -When you create a Viam account, Viam automatically creates an organization for you. -You can use this organization as your collaboration hub by inviting collaborators to your organization. -You can also add additional organizations as desired at any time. - -To facilitate collaboration, you can grant individual collaborators or entire organizations granular permissions for individual machines or entire locations. -This allows you flexibility to manage internal machines, sell devices to external customers and keep managing them, and collaborate with different partners or companies on groups of machines. -For more information, see [Permissions](/cloud/rbac/#permissions). - -### Configuration - -When you or your collaborators change the configuration of a machine or a group of machines in the Viam app, `viam-server` automatically synchronizes the configuration and updates the running resources within 15 seconds. -This means everyone who has access can change a fleet's [configuration](machines/#configure), even while your machines are running. 
- -You can see configuration changes made by yourself or by your collaborators by selecting **History** on the right side of your machine part's card on the **CONFIGURE** tab. -You can also revert to an earlier configuration from the History tab. - -{{< alert title="Simultaneous config edits" color="caution" >}} -If you edit a config while someone else edits the same config, the person who saves last will overwrite any prior changes that aren't reflected in the new config. - -Before editing a config, we recommend you refresh the page to ensure you have all the latest changes. -{{< /alert >}} - -Machine [configuration](machines/#configure) and machine [code](/sdks/) are intentionally kept separate, allowing you to keep track of versioning and debug issues separately. - -## Next steps - -To learn about configuring and provisioning many machines, see [Deploy a Large Fleet](/fleet/). - -To learn about monitoring and remotely controlling the machines in your fleet, see [Control Interface](/fleet/control/). - -Check out the following tutorial for an example of organizing a fleet into locations, configuring multiple machines, and syncing data from all of them: - -{{< cards >}} -{{% card link="/tutorials/control/air-quality-fleet/" %}} -{{< /cards >}} diff --git a/docs/cloud/account.md b/docs/cloud/account.md deleted file mode 100644 index 00c22c3bae..0000000000 --- a/docs/cloud/account.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: "Account Management" -linkTitle: "Accounts" -weight: 80 -type: "docs" -description: "Log in and out of your Viam account." -tags: ["fleet management", "cloud", "app"] -no_list: true -aliases: - - /fleet/account/ -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -The [Viam app](https://app.viam.com/) is a web UI for managing and building machines. - -## Create account and log in - -To get started on the Viam app, you must log in as an authorized user. 
-Viam supports sign up using Google, GitHub, Apple, and email. - -Navigate to [the main page](https://app.viam.com/). -If you haven't created an account yet, click **Sign Up** to create a new account using your preferred Single Sign On method or your email address and a password. -If you already have an account, click **Log In** to log in using your Single Sign On credentials or your email address and password. - -If you forget your password to the app, click **Forgot password** and enter your email address to obtain instructions to reset your password. - -{{< alert title="Info" color="info" >}} -Accounts created from separate authentication sources are unique to each other. -{{< /alert >}} - -
- -## Sign out - -To log out or sign out of the [Viam app](https://app.viam.com/), click on your profile icon in the upper right corner of your browser window. -Click **Sign out** to sign out of accessing all organizations, locations, and machines your credentials manage. diff --git a/docs/cloud/locations.md b/docs/cloud/locations.md deleted file mode 100644 index 542ad41416..0000000000 --- a/docs/cloud/locations.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -title: "Manage Locations and Sub-locations" -linkTitle: "Locations" -weight: 30 -type: "docs" -no_list: true -description: A location is a virtual grouping of machines that allows you to organize machines and manage access to your fleets. -tags: ["fleet management", "cloud", "app"] -aliases: - - /manage/fleet/locations/ - - /fleet/locations/ -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -In Viam, every machine belongs to a location. -A location is a virtual grouping of machines that allows you to organize machines and manage access. -Generally, a location defines a group of machines that are geographically close to each other. -If you are familiar with Google Drive, you can think of a location as similar to a folder within a shared drive. - -For example, an organization called Good Robots Inc has two warehouses across New York and Oregon. -Good Robots Inc can organize its machines into two locations based on their physical presence in a warehouse. - -You can also use locations as proxies for environments such as "Production" and "Testing" or other groupings. -Locations do not have to correspond with physical locations. - -Each machine you add to Viam belongs to a location. -Each location belongs to an organization. - -{{< alert title="Limit" color="note" >}} -By default, you can create up to 100 locations in an organization. -If you need to create more locations, [contact support](mailto:support@viam.com). 
-{{< /alert >}} - -{{}} - -You can access your locations on the Viam app on the **FLEET** tab's [**LOCATIONS** subtab](https://app.viam.com/fleet/locations). - -## Add a location - -When you create a new organization, Viam automatically creates a new location for you. -You can create additional locations by typing a new location name in the **New Location** field in the left side navigation bar on the **FLEET** page's [**LOCATIONS** subtab](https://app.viam.com/fleet/locations/) and clicking **Add**. - -Click a location's name to display the list of machines associated with that location. - -## Create a sub-location - -To create a sub-location you must first create the sub-location as a location and then choose a parent location: - -1. Create a location and add at least one machine to it. -2. At the bottom of the location's page, use the **New Parent Location** dropdown to choose a parent location. -3. Click **Change**. - -You can nest locations up to three levels deep. - -To move a sub-location to the top level of locations, select **Root** from the **New Parent Location** dropdown and then click **Change**. - -## Share a location - -A location always belongs to the organization it was created in. -Members of the organization have access to all locations in the organization by default. - -For more information on the permissions the roles assign for each resource, see [Permissions](/cloud/rbac/#locations). - -You can share a location beyond its organization by sharing a location with an additional organization: - -### Share a location with an additional organization - -Share your location with another organization you belong to by selecting the organization from the **Add Organization** dropdown menu and clicking **Share**. - -To share your location with an organization you are not a member of, select the location or enter the organization ID (a string like `1ab2c3d1-1234-123a-abcd-abcdef123456`) and click **Share**. 
-Members of the org can find the org ID on their org settings page. - -{{% alert title="Note" color="info" %}} - -Once you share a _nested_ location (sub-location), its parent location cannot be changed. - -{{% /alert %}} - -#### Remove an organization from a shared location - -You can remove any organization except the primary owner from the shared list by clicking the **X** to the right of the location in the shared list. - - - -#### Rotate a secret key - -If you ever need to rotate this key, click on the **Generate Key** button to generate a new key. - -Viam supports flexible key rotation with up to two keys in use at one time. -After generating a new secret key, update all references to the key in your code as soon as possible and then remove the old key. - -### Share a location with Viam support - -If you request support, you must share your location with the Viam Support team. -To do so, navigate to the location you need support with and click **Add Viam support**. - -Once you have received support, you can remove Viam Support from your location by clicking **Remove Viam support**. - -## Delete a location - -You can delete a location that is _empty of machines_ by clicking the trash can icon next to the location name at the top of the page for that location. -The icon will not appear if there are any machines in the location. diff --git a/docs/cloud/machines.md b/docs/cloud/machines.md deleted file mode 100644 index d48e0c91f5..0000000000 --- a/docs/cloud/machines.md +++ /dev/null @@ -1,138 +0,0 @@ ---- -title: "Manage Machines" -linkTitle: "Machines" -weight: 10 -type: "docs" -description: "A machine is an organizational concept, consisting of either one or multiple parts working closely together to complete tasks." 
-tags: ["fleet management", "cloud", "app"] -images: ["/fleet/app-usage/create-machine.png"] -aliases: - - /fleet/robots/ - - /manage/fleet/machines/ - - /fleet/machines/ -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -A _machine_ is an organizational concept, consisting of either one {{< glossary_tooltip term_id="part" text="part" >}}, or multiple _parts_ working closely together to complete tasks. -The machine represents the configuration and entry point for one or more computers (and the components they control) coupled into one logical grouping of parts that work together to complete tasks. -A machine usually reflects a physical device, from a camera collecting images, to a wheeled rover, or an articulated arm on a factory floor. -A machine always has a main part that receives client requests, and any number of other parts. - -## Add a new machine - -Add a new machine in the [Viam app](https://app.viam.com) by clicking **+ Add machine**, providing a name in the **New machine** field and clicking **Add machine** again. - -![The 'First Location' page on the Viam app with a new machine name in the New machine field and the Add Machine button next to the field highlighted.](/fleet/app-usage/create-machine.png) - -Click the name of a machine to go to that machine's page, where you'll find a variety of tools for working with your machine. - -## Navigating the machine page - -Next to the machine name, there is an indicator of the machine's status. -Click on the **status** dropdown to open a menu with information about each {{< glossary_tooltip term_id="part" text="part" >}} of your machine. 
-Once you connect to the `viam-server` instance on a part, this display includes its OS, Host, `viam-server` version, IP addresses, and what time it was last online or remote address (if live): - -![The machine page with part menu expanded](/fleet/app-usage/machine-page.png) - -### Set up a new machine - - - -To connect to the `viam-server` instance on a part, follow the setup instructions. -Open the part status dropdown menu in the top left corner of the page, next to the machine's name. -Click **View setup instructions** to open the setup instructions. - -Select your system's architecture and select the version of the {{< glossary_tooltip term_id="RDK" text="RDK" >}} to use. -Then, follow the instructions on the page to connect and set up your machine. - -{{% alert title="Tip" color="tip" %}} -If your machine is controlled by a microcontroller, install the [**viam-micro-server**](/installation/viam-micro-server-setup/#install-viam-micro-server) instead of full `viam-server`. -{{% /alert %}} - -More in-depth information on installing `viam-server` can be found in our [Installation Guide](/installation/viam-server-setup/#install-viam-server). - -Once all parts of your machine are set up and connected to the app, the part status display at the top left corner of the page turns green. -Now, you can manage your machine with one of four tabs: **CONFIGURE**, **CONTROL**, **LOGS**, and **CONNECT**: - -{{}} - -### CONFIGURE - -The configuration of a machine describes the {{< glossary_tooltip term_id="resource" text="resources" >}} that it has access to. -When a {{< glossary_tooltip term_id="part" text="machine part" >}} that is managed with the Viam app first comes online, it requests its configuration from the [Viam app](https://app.viam.com). -Once the machine has a configuration, it caches it locally and can use the configuration for up to 60 days. 
-The machine checks for new configurations every 15 seconds and changes its configuration automatically when a new configuration is available. - -After connecting your machine, go to the **CONFIGURE** tab, and start adding {{< glossary_tooltip term_id="component" text="components" >}}, {{< glossary_tooltip term_id="service" text="services" >}}, and other {{< glossary_tooltip term_id="resource" text="resources" >}}. - - - -The Viam app keeps a record of your configuration changes, allowing you to revert to earlier configurations if needed. -To see the history of the configuration of a machine part, click on **History** on the right side of its card on the **CONFIGURE** tab. - -For more information, see the [configuration documentation](/configure/#the-configure-tab). - -{{< alert title="Tip" color="tip" >}} -If you are managing a large fleet, you can use {{< glossary_tooltip term_id="fragment" text="fragments" >}} when [configuring your fleet](/fleet/fragments/). -{{< /alert >}} - -### CONTROL - -Once you have configured components and services for your machine, you can visually test and remotely operate them from the **CONTROL** tab in the [Viam app](https://app.viam.com) or the [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app). - -{{}} - -You can also switch between different machine parts by selecting the part from the left-hand menu. - -For more information, see [Control machines](/fleet/control/). - -### LOGS - -To make debugging issues with your machines easier, each machine automatically sends its logs to the cloud. -You can access your logs from the **LOGS** tab in the [Viam app](https://app.viam.com) and filter your logs for specific keywords or log levels: - -{{}} - -You can click on the part names in the left-hand menu to switch logs between parts. You can also change your timestamp format to ISO or Local depending on your preference. - -To view logs in the Viam mobile app: - -1. 
Select an organization by clicking on the menu icon in the top left corner and tapping an organization. -2. Tap the **Locations** tab and tap on a location and then on a machine. -3. Click the menu button marked "**...**" in the upper right corner. -4. Click **View Logs**. - -### CONNECT - -#### Code sample - -To start programming your machine, go to the **CONNECT** tab and select the **Code sample** page. -This has sample code snippets you can copy and paste into your control code to connect to your machine. - -{{% snippet "show-secret.md" %}} - -For more information on the SDKs, see [Write control code with Viam's SDKs](/appendix/apis/). - -#### Configure as remote part - -On the **CONNECT** tab, there is also a page called **Configure as remote part**. -This page has instructions for how to configure a {{< glossary_tooltip term_id="part" text="part" >}} of your machine as a [remote part](/architecture/parts/) of another machine. - -#### API keys - -Your machine and the Viam app communicate securely using [WebRTC](https://pkg.go.dev/go.viam.com/utils@v0.0.3/rpc#hdr-Connection) with unique secrets. -The **API keys** page of the **CONNECT** tab allows you to access, generate, and delete your [API keys](/cloud/rbac/#api-keys), which grant access to organizations, locations, and machines. - -![The Security tab of a machine's page noting the Machine part API keys dropdown menu, with the clipboard icon on the far right and the Generate Key button underneath the dropdown.](/fleet/app-usage/machine-secrets.png) - -Copy an API key or API key ID by clicking on the clipboard icon. -Click **Show details** and **Access settings** to go to your organization settings page, where you can modify the access your API keys provide. - -{{% snippet "secret-share.md" %}} - -## Delete a machine - -To delete a machine, click on the **...** menu in the top right hand corner of its page, select **Delete machine**, and confirm that you're sure. 
- -{{< imgproc alt="The delete machine button and the confirmation checkbox (Sure?) next to it." src="/fleet/app-usage/delete.png" resize="300x" >}} diff --git a/docs/cloud/organizations.md b/docs/cloud/organizations.md deleted file mode 100644 index b00e269c44..0000000000 --- a/docs/cloud/organizations.md +++ /dev/null @@ -1,94 +0,0 @@ ---- -title: "Manage Organizations" -linkTitle: "Organizations" -weight: 30 -type: "docs" -description: "An organization is a group of one or more locations that helps you organize and manage access to your fleet." -tags: ["fleet management", "cloud", "app"] -aliases: - - /manage/fleet/organizations/ - - /fleet/organizations/ -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -An organization is a group of one or more locations that helps you organize your fleet. - -An organization is the highest level grouping in the Viam platform, which generally represents a company or other institution. -You can also use organizations for departments or other entities that can have one or more [locations](/cloud/locations/). -If you are familiar with Google Drive, you can think of an organization as a shared drive. - -{{}} - -When you or another user registers for an account with Viam, they become a member of an organization. -If the user was invited to an organization, they become a part of that organization. -If the user registered without invitation, an organization and a {{< glossary_tooltip term_id="location" text="location" >}} is automatically created for the user. - -A user can create more organizations at any time. - -Any member of an organization can invite new users to that organization. - -For example, you may have an account with one organization for your personal smart machines at home and another organization for the smart machines at work. - -{{}} - -Your organization is shown in the upper right corner of the [Viam app](https://app.viam.com). 
-If you click on the organization dropdown, the app displays your name, email, and a list of organizations you belong to. - -{{< imgproc alt="The org dropdown showing an example user's name, email, Sign out button, list of organizations, and org settings button." src="/fleet/app-usage/my-org.png" resize="400x" declaredimensions=true >}} - -If you used an email invite to sign up, you have two organizations to begin with: the organization that invited you and a personal organization for other projects. - -Click an organization's name to navigate to its list of locations. - -### Create a new organization - -To create a new organization, click on the Org's **Settings** in the top right of the navigation bar. -Then enter the name for your new organization in the **New Organization** field in the upper left of the page. - -### Invite someone to an organization - -To invite a user to your organization, click on the Org's **Settings** in the top right of the navigation bar. -In the members section of the page, click on **Grant access** and enter their email address. -Then select the resource that you would like to grant the user access to and the designated role and click **Invite**. - -{{< imgproc alt="The user invitation menu on the Organization settings page." src="/fleet/app-usage/invite-user.png" resize="900x" declaredimensions=true >}} - -You can grant a user access to the following resources: - -- an {{< glossary_tooltip term_id="organization" text="organization" >}} -- a {{< glossary_tooltip term_id="location" text="location" >}} -- a {{< glossary_tooltip term_id="machine" text="machine" >}} - -For more information on the permissions the roles assign for each resource, see [Permissions](/cloud/rbac/#permissions). - -#### Use the mobile app - -You can also use the [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app) to invite users to your organization, on the go. Navigate to Home on the mobile app, and select your organization. 
Click the gear icon in the upper right corner to access the mobile organization settings page. On the settings page enter an email address, select a role, and tap **Grant Access**. - -### Create a namespace for your organization - -When uploading [custom modules](/registry/) to the Viam Registry, you must set a namespace for your organization to associate your module with. - -To create a new namespace for your organization, click on the Org's **Settings** in the top right of the navigation bar, then click the **Set a public namespace** button. -Enter a name or use the suggested name for your namespace, and then click **Set namespace**. -Consider the following as you choose a namespace: - -- A namespace may only contain letters, numbers, and the dash (`-`) character. -- Once set, a namespace _cannot be changed_: choose your namespace carefully! -- You must pick a unique namespace that is not already in use by another organization. -- As you enter your namespace, a message will appear to the right of the text box indicating whether the namespace is available, or whether an invalid character is detected. - -{{< imgproc alt="The namespace creation menu on the Organization settings page." src="/fleet/app-usage/create-namespace.png" resize="700x" declaredimensions=true >}} - -### Leave an organization - -To leave an organization, click on the Org's **Settings** in the top right of the navigation bar. -Then click **Leave organization**. - -### Delete an organization - -To delete an organization, click on the Org's **Settings** in the top right of the navigation bar. -Then click **Delete organization**. - -If the organization to delete contains any locations, you must delete them before you can delete the organization. 
diff --git a/docs/configure/_index.md b/docs/configure/_index.md deleted file mode 100644 index 2fbe574f6b..0000000000 --- a/docs/configure/_index.md +++ /dev/null @@ -1,394 +0,0 @@ ---- -title: "Machine Configuration" -linkTitle: "Machine Configuration" -weight: 429 -type: "docs" -description: "Before you can program a machine, you must configure its components and services as well as any modules, remotes, processes and frames." -imageAlt: "Configure a Machine" -images: ["/viam.svg"] -tags: ["manage", "components"] -aliases: - - /manage/configuration/ - - /build/configure/ - - /build/ -no_list: true -menuindent: true -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -Before you can program a smart machine, you must configure it. - -A machine's configuration defines the _{{< glossary_tooltip term_id="resource" text="resources" >}}_ (hardware and software services) it has access to, as well as any relevant parameters for those resources. -You can configure the following resources: - -- [Components](/configure/#components): _{{< glossary_tooltip term_id="component" text="Components" >}}_ are the hardware of your machine. -- [Services](/configure/#services): _{{< glossary_tooltip term_id="service" text="Services" >}}_ are the software that runs on your machine. -- [Processes](/configure/#processes): Processes automatically run specified scripts when the machine boots. -- [Modules](/configure/#modules): {{< glossary_tooltip term_id="module" text="Modules" >}} provide {{< glossary_tooltip term_id="modular-resource" text="modular resources" >}}, which are a way to add resource types or models that are not built into Viam. -- [Remote parts](/configure/#remote-parts): Remotes are a way to connect two separate machines so one can access the resources of the other. -- [Sub-parts](/configure/#sub-parts): Sub-parts are a way to connect two computers inside the same machine. 
-- [Fragments](/configure/#fragments): Fragments are a way of sharing and managing identical configuration files (or parts of config files) across multiple machines. -- [Frames](#frames): Frames hold reference frame information for the relative position of components in space. -- [Triggers](/configure/#triggers): Triggers allow you to trigger actions when certain types of data are sent from your machine to the cloud, or when the internet connectivity of your machine changes. -- [Network](/configure/#network): Networking options allow you to configure the bind address for accepting connections. - -To start configuring, go to the [Viam app](https://app.viam.com) and create a new machine. -Open the part status dropdown menu in the top left corner of the page, next to the machine's name. -Click **View setup instructions** to open the setup instructions. -Follow the appropriate instructions for your machine's architecture. - -The setup steps copy your machine's credentials to your machine. -When you turn on your machine, `viam-server` starts up and uses the provided credentials to fetch its full config from the [Viam app](https://app.viam.com). -Once the machine has a configuration, it caches it locally and can use the configuration for up to 60 days. -Since the configuration is cached locally, your machine does not need to stay connected to the Viam app after it has obtained its configuration file. - -If it is online, the machine checks for new configurations every 15 seconds and changes its configuration automatically when a new configuration is available. -All communication happens securely over HTTPS using secret tokens that are in a machine's configuration. - -If your machine will never connect to the internet, you can also create a [local configuration file](/internals/local-configuration-file/) on the machine itself. 
- -{{< alert title="Tip" color="tip" >}} -On Linux, the configuration is stored at /etc/viam.json by default and `viam-server` uses this configuration if no configuration is specified on startup. - -You can store your config file in a custom location if desired. -See [Run `viam-server`](/installation/manage-viam-server/#run-viam-server) for more information. -{{< /alert >}} - -After you have completed the setup steps and successfully connected to your machine, go to the **CONFIGURE** tab to start adding to the configuration. - -## The CONFIGURE tab - -The **CONFIGURE** tab on the [Viam app](https://app.viam.com) is the place to configure everything about your machine. - -You can switch between **Builder**, **JSON**, and **Frame** mode by clicking on the icon in the upper left-hand corner: - -![Mode selector on CONFIGURE tab.](/build/configure/mode-selector.png) - -- **Builder** mode provides a graphical interface for configuring your machine resources. -- **JSON** mode provides a text editing field where you can write and edit the config manually. -- **Frame** mode provides a graphical interface for configuring and visualizing the relative position of components in space. - For more information, see the [Frame System documentation](/services/frame-system/). - -Regardless of the editing mode you choose, Viam stores the configuration file in [JSON (JavaScript Object Notation)](https://en.wikipedia.org/wiki/JSON). - -{{< alert title="Caution: Simultaneous config edits" color="caution" >}} -If you edit a config while someone else edits the same config, the person who saves last will overwrite any prior changes that aren't reflected in the new config. - -Before editing a config, we recommend you refresh the page to ensure you have all the latest changes. -{{< /alert >}} - -If you add components in **Builder** mode and click **Save** in the top right corner of the screen, you can switch to **JSON** and see the JSON that has been generated by the builder. 
- -{{% expand "An example JSON config file for a machine with a board component, motor component, camera component, and vision service configured" %}} - -```json -{ - "components": [ - { - "name": "local", - "model": "pi", - "type": "board", - "namespace": "rdk", - "attributes": {}, - "depends_on": [] - }, - { - "name": "my-motor", - "model": "gpio", - "type": "motor", - "namespace": "rdk", - "attributes": { - "pins": { - "a": "13", - "b": "15" - }, - "board": "local", - "max_rpm": 120 - }, - "depends_on": [] - }, - { - "name": "my_camera", - "model": "webcam", - "type": "camera", - "namespace": "rdk", - "attributes": { - "video_path": "video0" - } - } - ], - "services": [ - { - "name": "detector", - "type": "vision", - "attributes": { - "register_models": [ - { - "parameters": { - "segment_size_px": 200, - "hue_tolerance_pct": 0.05, - "detect_color": "#19FFD9" - }, - "type": "color_detector", - "name": "green_detector" - } - ] - } - } - ], - "modules": [] -} -``` - -See [Example JSON configuration file](/internals/local-configuration-file/#example-json-configuration-file) for an additional example. - -{{% /expand %}} - - - -### Components - -Components represent the pieces of hardware on your machine that you want to control with Viam. -To add a new component, click the **+** icon next to your {{< glossary_tooltip term_id="part" text="machine part" >}} in the left-hand menu of the **CONFIGURE** tab and select **Component** or hit **C**. -Search for and select your desired {{< glossary_tooltip term_id="model" text="model" >}}. - -You must configure each component with a type, a model, a name, attributes, and dependencies: - -- `type`: The broad component category, such as `motor`, `arm` or `camera`. - Components of a given type have a common API. - -- `model`: Indicates the more specific category of hardware. - Components of the same model are supported using the same low-level code. 
- -- `name`: Serves as an identifier when accessing the resource from your code, as well as when configuring other resources that are dependent on that resource. - You can either accept the suggested default name when creating a component or choose a unique name for a component. - The name must start with a letter or number and only contain letters, numbers, dashes, and underscores with a max length of 60. - -- `attributes`: Configure component details such as how the component is wired to the machine, its dimensions, and other specifications; attributes vary widely between models. - See a {{< glossary_tooltip term_id="component" text="component" >}}'s documentation for more details. - -- `depends_on`: Any components that a given component relies upon, and that must be initialized on boot before this component is initialized. - Many built-in components have convenient implicit dependencies, in which case `depends_on` can be left blank. - For example, a [`gpio` motor](/components/motor/gpio/) depends on the `board` to which it is wired, but it has a dedicated `board` attribute and `viam-server` will automatically initialize that board before it looks for the motor. - -- `log_configuration`: Specify the log level for a resource. The default log level is `"Info"`. 
For example: - - ```json - "log_configuration": { - "level": "Debug" - } - ``` - -For specific information on how to configure each supported component type, see the {{< glossary_tooltip term_id="component" text="component" >}}'s documentation: - -{{< cards >}} -{{% relatedcard link="/components/arm" %}} -{{% relatedcard link="/components/base" %}} -{{% relatedcard link="/components/board" %}} -{{% relatedcard link="/components/camera" %}} -{{% relatedcard link="/components/encoder" %}} -{{% relatedcard link="/components/gantry" %}} -{{% relatedcard link="/components/generic" %}} -{{% relatedcard link="/components/gripper" %}} -{{% relatedcard link="/components/input-controller" %}} -{{% relatedcard link="/components/motor" %}} -{{% relatedcard link="/components/movement-sensor" %}} -{{% relatedcard link="/components/power-sensor" %}} -{{% relatedcard link="/components/sensor" %}} -{{% relatedcard link="/components/servo" %}} -{{< /cards >}} - -Some resources have a **TEST** section on the bottom half of their configuration pane which you can expand and interact with to test out controlling the component. -You must be running `viam-server` and connected to your machine to use this feature. - -{{}} - -On the **...** menu in the upper right corner of each resource you can **Duplicate**, **Delete**, and **Disable** or **Enable** it. - -{{}} - -{{% alert title="Tip" color="tip" %}} - -When you configure a component on the **CONFIGURE** tab, it will also appear on the **CONTROL** tab which gives you an interface to interact with it. -The **Code sample** page on the **CONNECT** tab will also update to include code for some basic interaction with that component using the Viam [SDKs](/appendix/apis/). - -{{% /alert %}} - -### Services - -Services are software packages that make it easier to add complex capabilities such as motion planning or object detection to your machine. 
-To add a new service, click the **+** icon next to your {{< glossary_tooltip term_id="part" text="machine part" >}} in the left-hand menu of the **CONFIGURE** tab and select **Service** or hit **S**. -Search for and select your desired {{< glossary_tooltip term_id="model" text="model" >}}. - -You must configure a service with a `name` and a `type`: - -- `type`: specifies which of the Viam services you want to use on your machine, such as the vision service or the motion service. -- `name`: serves as an identifier when accessing the resource from your code, as well as when configuring other resources that are dependent on that resource. - You can accept the suggested default name when creating a service or choose a choose any unique name for a service. - The name must start with a letter or number and can only contain letters, numbers, dashes, and underscores with a max length of 60. -- `log_configuration`: Specify the log level for a resource. The default log level is `"Info"`. For example: - - ```json - "log_configuration": { - "level": "Debug" - } - ``` - -The other aspects of configuring a service are highly specific to the type of service, review the docs for the service you are interested in: - -{{< cards >}} -{{% relatedcard link="/services/data/" %}} -{{% relatedcard link="/services/ml/" alt_title="Machine Learning" %}} -{{% relatedcard link="/services/motion" %}} -{{% relatedcard link="/services/navigation" %}} -{{% relatedcard link="/services/slam" %}} -{{% relatedcard link="/services/vision" %}} -{{% relatedcard link="/services/generic" %}} -{{% relatedcard link="/services/frame-system" %}} -{{< /cards >}} - -Some resources have a **TEST** section on the bottom half of their configuration pane which you can expand and interact with to test out controlling the service. -You must be running `viam-server` and connected to your machine to use this feature. 
- -You can disable a service without removing it from the configuration by selecting the **...** menu in the upper right corner and selecting **Disable**. - -{{% alert title="Tip" color="tip" %}} - -When you configure a service on the **CONFIGURE** tab, it will also appear on the **CONTROL** tab which gives you an interface to test and interact with it. -The **Code sample** page on the **CONNECT** tab will also update to include code for some basic interaction with that service using the Viam [SDKs](/appendix/apis/). - -{{% /alert %}} - -### Processes - -To automatically run a specified command when the machine boots, configure a _{{< glossary_tooltip term_id="process" text="process" >}}_. -You can configure any command, for example one that executes a binary or a script, to run as a process. - -To add a new process, click the **+** icon next to your {{< glossary_tooltip term_id="part" text="machine part" >}} in the left-hand menu of the **CONFIGURE** tab and select **Process**. -Find more information in the [processes documentation](/configure/processes/). - -### Modules - -Modules allow you to add [modular resources](/registry/) to your machines which add resource types or models that are not built into Viam. -Many models are available in the [registry](https://app.viam.com/registry) and you are able to add them as components or services. - -#### Local Modules - -To add a module that is not in the registry and is local to your machine, click the **+** icon next to your {{< glossary_tooltip term_id="part" text="machine part" >}} in the left-hand menu of the **CONFIGURE** tab and select **Local module**. -Follow the instructions in our [registry documentation](/registry/modular-resources/#configuration) to configure the module. - -### Remote parts - -Configuring a remote part is a way to connect two separate machines so one can access the resources of the other. 
- -To configure a remote part, click the **+** icon next to your {{< glossary_tooltip term_id="part" text="machine part" >}} in the left-hand menu of the **CONFIGURE** tab and select **Remote part**. -Find more information in our [machine parts documentation](/architecture/parts/). - -### Sub-parts - -Configure a sub-part to connect two computers inside the same machine. - -To configure a sub-part, click the **+** icon next to your {{< glossary_tooltip term_id="part" text="machine part" >}} in the left-hand menu of the **CONFIGURE** tab and select **Sub-part**. -Find more information in our [machine parts documentation](/architecture/parts/). - -### Fragments - -You can use fragments to share similar {{< glossary_tooltip term_id="resource" text="resource" >}} configuration files across multiple machines in your fleet. -For example, if you have multiple machines with the same motor hardware, wired the same way, you can create a fragment to configure that motor and share it easily across all of your machines, without needing to individually configure the motor component for each machine. - -To use a fragment, click the **+** icon next to your {{< glossary_tooltip term_id="part" text="machine part" >}} in the left-hand menu of the **CONFIGURE** tab and select **Insert fragment**. -See [Use Fragments to Configure a Fleet](/fleet/fragments/) for more information on creating and deploying fragments. - -### Frames - -The frame system holds reference frame information for the relative position of components in space. - -Click on the **Frame** mode to visualize and configure the relative positions of components. -Find more information in the [frame system documentation](/services/frame-system/). - -### Triggers - -Triggers allow you to trigger actions when certain types of data are sent from your machine to the cloud, or when the internet connectivity of your machine changes. 
-For example, you can configure a trigger to send you a notification when your robot's sensor collects a new reading. - -See [Configure a Trigger](/configure/triggers/) for more information on triggers. - -### Network - -You can configure your machine's bind address and heartbeat window. - -On your machine's **CONFIGURE** tab, click the **+** button and select **Network**. - -{{< tabs >}} -{{% tab name="Builder UI" %}} - -In the **network** panel, configure your **Bind address** and your **Heartbeat window**. - -{{% /tab %}} -{{% tab name="JSON" %}} - -```json - ... // components {...}, services {...}, - "network": { - "bind_address": "0.0.0.0:8080", - "sessions": { - "heartbeat_window": "30s" // Changes heartbeat window to 30 seconds - } - }, - ... -``` - -{{% /tab %}} -{{< /tabs >}} - - -| Attribute | Type | Required? | Description | -| --------- | ---- | --------- | ----------- | -| `bind_address` | string | Optional | The address `viam-server` binds to for accepting connections. Default: `"0.0.0.0:8080"` when managed by the Viam app or when authentication and TLS are enabled. | -| `sessions.heartbeat_window` | string | Optional | A _heartbeat_ is a signal that indicates machine connectivity. Heartbeats are sent automatically from Viam's SDKs unless you disable them with the session management API or session management is not implemented by the server in question. Heartbeats are automatically sent at an interval that is one fifth of the heartbeat window. Default: `"2s"`. Use a higher value in high-latency networks. Requires a restart to take effect. | - -## Configuration History - -The Viam app keeps a record of each machine's configuration changes, allowing you to revert to earlier configurations if needed and see who made specific changes. - -To see the configuration history for a machine part, click on the **History** link at the top right corner of the machine part's card in the Viam app. 
- -{{}} - -To restore to an earlier version of your configuration, click the **Restore version** button next to the desired configuration. - -## Troubleshooting - -If you run into issues, here are some things to try: - -- Check the [**LOGS** tab](/cloud/machines/#logs) to view log messages and errors from `viam-server`. - You can also [access the local log file](/installation/manage-viam-server/#view-viam-server-logs) on your machine if needed. -- Make sure all configured components are saved to your config. - If they aren't, you will see an **Unsaved changes** note next to the **Save** button in the top right corner of the page. -- Try restarting `viam-server` by navigating to the app's **CONFIGURE** tab in **Builder** mode, clicking the **...** menu on the right side of the machine part's card, and selecting **Restart part**. - It takes a few minutes for the server to shut down and restart. -- If you need to revert to an earlier configuration, use the [Configuration History](#configuration-history) to restore to an earlier version. -- Make sure the issue is not hardware related. - Some things to check are that the machine has adequate power, all wires are properly connected, and no chips or other hardware components are shorted or overheated. -- See [Troubleshooting](/appendix/troubleshooting/) for additional troubleshooting steps. -- {{< snippet "social.md" >}} - -## Local setup - -Configuring `viam-server` with the Viam app allows you to use Viam's cloud features: - -- [Fleet Management](/fleet/) -- [Data Management](/services/data/) -- [Machine Learning](/services/ml/) - -However, if you are configuring a machine that can never connect to the internet, you can create a [local configuration file](/internals/local-configuration-file/) on your machine. -A locally-configured machine will not be able to access Viam's cloud features. - -## Next steps - -After configuring your machine, you can use the [Viam SDKs](/appendix/apis/) to program and control your machine. 
- -If you want to try configuring a machine but don't have any hardware on hand, try the [Build a Mock Robot](/tutorials/configure/build-a-mock-robot/) tutorial. diff --git a/docs/configure/triggers.md b/docs/configure/triggers.md deleted file mode 100644 index 07c2986362..0000000000 --- a/docs/configure/triggers.md +++ /dev/null @@ -1,491 +0,0 @@ ---- -title: "Configure a Trigger" -linkTitle: "Triggers" -weight: 50 -type: "docs" -description: "Configure a trigger to trigger actions when data is sent from your machine to the cloud, or when your machine's internet connectivity changes." -tags: ["triggers"] -aliases: - - /build/configure/webhooks/ - - /build/configure/triggers/ -date: "2024-10-17" -# updated: "" # When the content was last entirely checked ---- - -Triggers allow you to send webhook requests or emails for the following events: - -- **Data has been synced to the cloud**: trigger when data from the machine is synced -- **Part is online**: trigger continuously at a specified interval while the {{< glossary_tooltip term_id="part" text="machine part" >}} is online -- **Part is offline**: trigger continuously at a specified interval while the machine part is offline -- **Conditional data ingestion**: trigger any time data is captured from a specified component with a specified method and condition - -For example, you can configure a trigger to send you a notification when your robot's sensor collects a new reading. - -To configure a trigger: - -{{< tabs >}} -{{% tab name="Builder mode" %}} - -1. Go to the **CONFIGURE** tab of your machine on the [Viam app](https://app.viam.com). - Click the **+** (Create) button in the left side menu and select **Trigger**. - - {{}} - -2. Name the trigger and click **Create**. - -3. Select trigger **Type**. - For some types you can configure additional attributes: - -{{< tabs name="Types of Triggers" >}} -{{% tab name="Data synced to cloud" %}} - -Select the data types for which the Trigger should send requests. 
-Whenever data of the specified data types is ingested, a `POST` request will be sent. - -{{% /tab %}} -{{% tab name="Conditional data ingestion" %}} - -Select the component you want to capture data from and the method you want to capture data from. -Then, add any conditions. - -These can include a key, a value, and a logical operator. -For example, a trigger configured to fire when data is captured from the motor `motor-1`'s `IsPowered` method when `is_on` is equal to `True`: - -{{}} - -For more information, see [Conditions](#conditions). - -{{% alert title="Note" color="note" %}} -You must [configure data capture](/services/data/) for your component to use this trigger. -{{% /alert %}} - -{{% /tab %}} -{{< /tabs >}} - -4. Add **Webhooks** or **Emails**. - -{{< tabs name="Notifications types" >}} -{{% tab name="Webhooks" %}} - -Click **Add Webhook**. -Add the URL of your cloud function or lambda. -Configure the time between notifications. - -![The trigger configured with an example URL in the Viam app.](/build/configure/trigger-configured.png) - -{{% /tab %}} -{{% tab name="Emails" %}} - -Click **Add Email**. -Add the email you wish to be notified whenever this trigger is triggered. -Configure the time between notifications. - -![The trigger configured with an example email in the Viam app.](/build/configure/trigger-configured-email.png) - -{{% /tab %}} -{{< /tabs >}} -{{% /tab %}} -{{% tab name="JSON mode" %}} - -To configure your trigger by using **JSON** mode instead of **Builder** mode, paste one of the following JSON templates into your JSON config. -`"triggers"` is a top-level section, similar to `"components"` or `"services"`. 
- -{{< tabs >}} -{{% tab name="JSON Template: Data Synced" %}} - -```json {class="line-numbers linkable-line-numbers"} - "triggers": [ - { - "name": "", - "event": { - "type": "part_data_ingested", - "data_ingested": { - "data_types": ["binary", "tabular", "file"] - } - }, - "notifications": [ - { - "type": "webhook", - "value": "https://1abcde2ab3cd4efg5abcdefgh10zyxwv.lambda-url.us-east-1.on.aws", - "seconds_between_notifications": - } - ] - } - ] -``` - -{{% /tab %}} -{{% tab name="JSON Template: Part Online" %}} - -```json {class="line-numbers linkable-line-numbers"} - "triggers": [ - { - "name": "", - "event": { - "type": "part_online" - }, - "notifications": [ - { - "type": "webhook", - "value": "", - "seconds_between_notifications": - } - ] - } - ] -``` - -{{% /tab %}} -{{% tab name="JSON Template: Part Offline" %}} - -```json {class="line-numbers linkable-line-numbers"} - "triggers": [ - { - "name": "", - "event": { - "type": "part_offline" - }, - "notifications": [ - { - "type": "webhook", - "value": "", - "seconds_between_notifications": - } - ] - } - ] -``` - -{{% /tab %}} -{{% tab name="JSON Template: Conditional Data Ingestion" %}} - -```json {class="line-numbers linkable-line-numbers"} -"triggers": [ - { - "name": "", - "event": { - "type": "conditional_data_ingested", - "conditional": { - "data_capture_method": "::", - "condition": { - "evals": [ - { - "operator": "", - "value": - } - ] - } - } - }, - "notifications": [ - { - "type": "email", - "value": "", - "seconds_between_notifications": - } - ] - } -] - -``` - -{{% /tab %}} -{{% tab name="JSON Example" %}} - -```json {class="line-numbers linkable-line-numbers"} -{ - "components": [ - { - "name": "local", - "model": "pi", - "type": "board", - "namespace": "rdk", - "attributes": {}, - "depends_on": [] - }, - { - "name": "my_temp_sensor", - "model": "bme280", - "type": "sensor", - "namespace": "rdk", - "attributes": {}, - "depends_on": [], - "service_configs": [ - { - "type": "data_manager", - 
"attributes": { - "capture_methods": [ - { - "method": "Readings", - "additional_params": {}, - "capture_frequency_hz": 0.017 - } - ] - } - } - ] - } - ], - "triggers": [ - { - "name": "trigger-1", - "event": { - "type": "part_data_ingested", - "data_ingested": { - "data_types": ["binary", "tabular", "file"] - } - }, - "notifications": [ - { - "type": "webhook", - "value": "", - "seconds_between_notifications": 0 - } - ] - } - ] -} -``` - -{{% /tab %}} -{{< /tabs >}} - -{{% /tab %}} -{{< /tabs >}} - -The following attributes are available for triggers: - - -| Name | Type | Required? | Description | -| ---- | ---- | --------- | ----------- | -| `name` | string | **Required** | The name of the trigger | -| `event` | object | **Required** | The trigger event object:
  • `type`: The type of the event to trigger on. Options: `part_online`, `part_offline`, `part_data_ingested`, `conditional_data_ingested`.
  • `data_types`: Required with `type` `part_data_ingested`. The data types that trigger the event. Options: `binary`, `tabular`, `file`, `unspecified`.
  • `conditional`: Required with `type` `conditional_data_ingested`. See [Conditions](#conditions) for more information.
| -| `notifications` | object | **Required** | The notifications object:
  • `type`: The type of the notification. Options: `webhook`, `email`
  • `value`: The URL to send the request to or the email address to notify.
  • `seconds_between_notifications`: The interval between notifications in seconds.
| - -#### Conditions - -The `conditional` object for the `conditional_data_ingested` trigger includes the following options: - - -| Name | Type | Required? | Description | -| ---- | ---- | --------- | ----------- | -| `data_capture_method` | string | **Required** | The method of data capture to trigger on.
Example: `sensor::Readings`. | -| `condition` | object | Optional | Any additional conditions for the method to fire the trigger. Leave out this object for the trigger to fire any time there is data synced.
Options:
  • `evals`:
    • `operator`: Logical operator for the condition.
    • `value`: An object, string, or integer that specifies the value of the method of the condition, along with the key or nested keys of the measurements in data capture.
| - -Options for `operator`: - -| Name | Description | -| ----- | ------------------------ | -| `lt` | Less than | -| `gt` | Greater than | -| `lte` | Less than or equal to | -| `gte` | Greater than or equal to | -| `eq` | Equals | -| `neq` | Does not equal | - -Examples: - -{{< tabs >}} -{{% tab name="1 level of nesting" %}} - -```json {class="line-numbers linkable-line-numbers"} -"condition": { - "evals": [ - { - "operator": "lt", - "value": { - "Line-Neutral AC RMS Voltage": 130 - } - } - ] -} -``` - -This eval would trigger for the following sensor reading: - -```json {class="line-numbers linkable-line-numbers"} -{ - "readings": { - "Line-Neutral AC RMS Voltage": 100 - } -} -``` - -{{% /tab %}} -{{% tab name="2 levels of nesting" %}} - -```json {class="line-numbers linkable-line-numbers"} -"condition": { - "evals": [ - { - "operator": "lt", - "value": { - "coordinate": { - "latitude": 50 - } - } - } - ] -} -``` - -This eval would trigger for the following sensor reading: - -```json {class="line-numbers linkable-line-numbers"} -{ - "readings": { - "coordinate": { - "latitude": 40 - } - } -} -``` - -{{% /tab %}} -{{< /tabs >}} - -5. Write your cloud function or lambda to process the request from `viam-server`. - You can use your cloud function or lambda to interact with any external API such as, for example, Twilio, PagerDuty, or Zapier. 
- The following example function prints the received headers: - - {{< tabs >}} - {{% tab name="Flask" %}} - -```python {class="line-numbers linkable-line-numbers" } -from flask import Flask, request - -app = Flask(__name__) - - -@app.route("/", methods=['GET', 'POST']) -def trigger(): - headers = request.headers - data = {} - if request.data: - data = request.json - payload = { - "Org-Id": headers.get('org-id', 'no value'), - "Organization-Name": headers.get('organization-name', '') or - data.get('org_name', 'no value'), - "Location-Id": headers.get('location-id', 'no value'), - "Location-Name": headers.get('location-name', '') or - data.get('location_name', 'no value'), - "Part-Id": headers.get('part-id', 'no value'), - "Part-Name": headers.get('part-name', 'no value'), - "Robot-Id": headers.get('robot-id', 'no value'), - "Machine-Name": headers.get('machine-name', '') or - data.get('machine_name', 'no value'), - "Component-Type": data.get('component_type', 'no value'), - "Component-Name": data.get('component_name', 'no value'), - "Method-Name": data.get('method_name', 'no value'), - "Min-Time-Received": data.get('min_time_received', 'no value'), - "Max-Time-Received": data.get('max_time_received', 'no value'), - "Data-Type": data.get('data_type', 'no value'), - "File-Id": data.get('file_id', 'no value'), - "Trigger-Condition": data.get("trigger_condition", 'no value'), - "Data": data.get('data', 'no value') - } - print(payload) - - return payload - - -if __name__ == '__main__': - app.run(host='0.0.0.0', port=8080) -``` - -{{% /tab %}} -{{% tab name="functions_framework" %}} - -```python {class="line-numbers linkable-line-numbers"} -import functions_framework -import requests -import time - - -@functions_framework.http -def hello_http(request): - headers = request.headers - data = {} - if request.data: - data = request.json - payload = { - "Org-Id": headers.get("org-id", "no value"), - "Organization-Name": headers.get("organization-name", "") - or 
data.get("org_name", "no value"), - "Location-Id": headers.get("location-id", "no value"), - "Location-Name": headers.get("location-name", "") - or data.get("location_name", "no value"), - "Part-Id": headers.get("part-id", "no value"), - "Part-Name": headers.get("part-name", "no value"), - "Robot-Id": headers.get("robot-id", "no value"), - "Machine-Name": headers.get("machine-name", "") - or data.get("machine_name", "no value"), - "Component-Type": data.get("component_type", "no value"), - "Component-Name": data.get("component_name", "no value"), - "Method-Name": data.get("method_name", "no value"), - "Min-Time-Received": data.get("min_time_received", "no value"), - "Max-Time-Received": data.get("max_time_received", "no value"), - "Data-Type": data.get("data_type", "no value"), - "File-Id": data.get('file_id', "no value"), - "Trigger-Condition": data.get("trigger_condition", "no value"), - "Data": data.get('data', "no value") - } - print(payload) - - return 'Received headers: {}'.format(payload) -``` - -{{% /tab %}} -{{< /tabs >}} - -## Returned headers - -When a trigger occurs, Viam sends a HTTP request to the URL you specified for the trigger: - - -| Trigger type | HTTP Method | -| ------------ | ----------- | -| `part_data_ingested` | POST | -| `conditional_data_ingested` | POST | -| `part_online` | GET | -| `part_offline` | GET | - -The request includes the following headers: - - -| Header Key | Description | Trigger types | -| ---------- | ----------- | ------------- | -| `Org-Id` | The ID of the organization that triggered the request. | all | -| `Organization-Name` | The name of the organization that triggered the request. | `part_online`, `part_offline` | -| `Location-Id` | The location of the machine that triggered the request. | all | -| `Location-Name` | The location of the machine that triggered the request. | `part_online`, `part_offline` | -| `Part-Id` | The part of the machine that triggered the request. 
| all | -| `Machine-Name` | The name of the machine that triggered the request. | `part_online`, `part_offline` | -| `Robot-Id` | The ID of the machine that triggered the request. | all | - -The request body includes the following data: - - -| Data Key | Description | Trigger types | -| -------- | ----------- | ------------- | -| `component_name` | The name of the component for which data was ingested. | `part_data_ingested`, `conditional_data_ingested` | -| `component_type` | The type of component for which data was ingested. | `part_data_ingested`, `conditional_data_ingested` | -| `method_name` | The name of the method from which data was ingested. | `part_data_ingested`, `conditional_data_ingested` | -| `min_time_received` | Indicates the earliest time a piece of data was received. | `part_data_ingested` | -| `max_time_received` | Indicates the latest time a piece of data was received. | `part_data_ingested` | -| `method_name` | The name of the method that triggered the request. | `conditional_data_ingested` | -| `machine_name` | The name of the machine that triggered the request. | `part_data_ingested`, `conditional_data_ingested` | -| `location_name` | The location of the machine that triggered the request. | `part_data_ingested`, `conditional_data_ingested` | -| `org_name` | The name of the organization that triggered the request. | `part_data_ingested`, `conditional_data_ingested` | -| `file_id` | The id of the file that was ingested. | `part_data_ingested` | -| `trigger_condition` | The condition that triggered the request. | `conditional_data_ingested` | -| `data` | The ingested sensor data. Includes `metadata` with `received_at` and `requested_at` timestamps and `data` in the form `map[string]any`. 
| `part_data_ingested`, `conditional_data_ingested` (sensor data) | - -## Next steps - -To see an example project that uses triggers to send email notification, see the [Monitor Job Site Helmet Usage with Computer Vision tutorial](/tutorials/projects/helmet/#configure-a-trigger-on-your-machine). - -{{< cards >}} -{{% card link="/tutorials/projects/helmet/" %}} -{{< /cards >}} diff --git a/docs/data-ai/_index.md b/docs/data-ai/_index.md new file mode 100644 index 0000000000..d04c052dc5 --- /dev/null +++ b/docs/data-ai/_index.md @@ -0,0 +1,58 @@ +--- +linkTitle: "AI & Data" +title: "Work with Data and AI" +weight: 250 +layout: "docs" +type: "docs" +no_list: true +open_on_desktop: true +overview: true +description: "Sync and store sensor data, images, and any other binary or timeseries data. Then use ML and AI to turn your data into insights and action." +--- + +Sync and store sensor data, images, and any other binary or timeseries data. Then use ML and AI to turn your data into insights and action. + + + + +
+ +{{< how-to-expand "Leverage AI" "8" "INTERMEDIATE" "" "hoverable-ai" >}} +{{< cards >}} +{{% card link="/data-ai/ai/create-dataset/" noimage="true" %}} +{{% card link="/data-ai/ai/train-tflite/" noimage="true" %}} +{{% card link="/data-ai/ai/train/" noimage="true" %}} +{{% card link="/data-ai/ai/deploy/" noimage="true" %}} +{{% card link="/data-ai/ai/run-inference/" noimage="true" %}} +{{% card link="/data-ai/ai/alert/" noimage="true" %}} +{{% card link="/data-ai/ai/act/" noimage="true" %}} +{{< /cards >}} +{{< /how-to-expand >}} + +{{< how-to-expand "Work with data" "4" "BEGINNER-FRIENDLY" "" "hoverable-work" >}} +{{< cards >}} +{{% card link="/data-ai/data/query/" noimage="true" %}} +{{% card link="/data-ai/data/visualize/" noimage="true" %}} +{{% card link="/data-ai/data/advanced/alert-data/" noimage="true" %}} +{{% card link="/data-ai/data/export/" noimage="true" %}} +{{< /cards >}} +{{< /how-to-expand >}} + +{{< how-to-expand "Capture data" "1" "BEGINNER-FRIENDLY" "" "hoverable-capture" >}} +{{< cards >}} +{{% card link="/data-ai/capture-data/capture-sync/" noimage="true" %}} +{{< /cards >}} +{{< /how-to-expand >}} + +Platform diagram with data elements highlighted +Platform diagram with data capture elements highlighted +Platform diagram with data usage elements highlighted +Platform diagram with AI elements highlighted + +
diff --git a/docs/data-ai/ai/_index.md b/docs/data-ai/ai/_index.md new file mode 100644 index 0000000000..a7a53e72e5 --- /dev/null +++ b/docs/data-ai/ai/_index.md @@ -0,0 +1,10 @@ +--- +linkTitle: "Leverage AI" +title: "Leverage AI" +weight: 300 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +header_only: true +--- diff --git a/docs/data-ai/ai/act.md b/docs/data-ai/ai/act.md new file mode 100644 index 0000000000..53dea9a2d4 --- /dev/null +++ b/docs/data-ai/ai/act.md @@ -0,0 +1,157 @@ +--- +linkTitle: "Act based on inferences" +title: "Act based on inferences" +weight: 70 +layout: "docs" +type: "docs" +description: "Use the vision service API to act based on inferences." +next: "/data-ai/ai/advanced/upload-external-data/" +--- + +You can use the [vision service API](/dev/reference/apis/services/vision/) to get information about your machine's inferences and program behavior based on that. + +The following are examples of what you can do using a vision service alongside hardware: + +- [Line following robot](#program-a-line-following-robot): Using computer vision to follow objects or a pre-determined path +- [Accident prevention and quality assurance](#act-in-industrial-applications) + +## Program a line following robot + +For example, you can [program a line following robot](/tutorials/services/color-detection-scuttle/) that uses a vision service to follow a colored object. 
+ +You can use the following code to detect and follow the location of a colored object: + +{{% expand "Click to view code" %}} + +```python {class="line-numbers linkable-line-numbers"} +async def connect(): + opts = RobotClient.Options.with_api_key( + # Replace "" (including brackets) with your machine's API key + api_key='', + # Replace "" (including brackets) with your machine's + # API key ID + api_key_id='' + ) + return await RobotClient.at_address("ADDRESS FROM THE VIAM APP", opts) + + +# Get largest detection box and see if it's center is in the left, center, or +# right third +def leftOrRight(detections, midpoint): + largest_area = 0 + largest = {"x_max": 0, "x_min": 0, "y_max": 0, "y_min": 0} + if not detections: + print("nothing detected :(") + return -1 + for d in detections: + a = (d.x_max - d.x_min) * (d.y_max-d.y_min) + if a > largest_area: + a = largest_area + largest = d + centerX = largest.x_min + largest.x_max/2 + if centerX < midpoint-midpoint/6: + return 0 # on the left + if centerX > midpoint+midpoint/6: + return 2 # on the right + else: + return 1 # basically centered + + +async def main(): + spinNum = 10 # when turning, spin the motor this much + straightNum = 300 # when going straight, spin motor this much + numCycles = 200 # run the loop X times + vel = 500 # go this fast when moving motor + + # Connect to robot client and set up components + machine = await connect() + base = Base.from_robot(machine, "my_base") + camera_name = "" + camera = Camera.from_robot(machine, camera_name) + frame = await camera.get_image(mime_type="image/jpeg") + + # Convert to PIL Image + pil_frame = viam_to_pil_image(frame) + + # Grab the vision service for the detector + my_detector = VisionClient.from_robot(machine, "my_color_detector") + + # Main loop. Detect the ball, determine if it's on the left or right, and + # head that way. 
Repeat this for numCycles + for i in range(numCycles): + detections = await my_detector.get_detections_from_camera(camera_name) + + answer = leftOrRight(detections, pil_frame.size[0]/2) + if answer == 0: + print("left") + await base.spin(spinNum, vel) # CCW is positive + await base.move_straight(straightNum, vel) + if answer == 1: + print("center") + await base.move_straight(straightNum, vel) + if answer == 2: + print("right") + await base.spin(-spinNum, vel) + # If nothing is detected, nothing moves + + await robot.close() + +if __name__ == "__main__": + print("Starting up... ") + asyncio.run(main()) + print("Done.") +``` + +{{% /expand%}} + +If you configured the color detector to detect red in the Viam app, your rover should detect and navigate towards any red objects that come into view of its camera. +Use something like a red sports ball or book cover as a target to follow to test your rover: + +
+{{
+
+## Act in industrial applications
+
+You can also act based on inferences in an industrial context.
+For example, you can program a robot arm to halt operations when workers enter dangerous zones, preventing potential accidents.
+
+The code for this would look like:
+
+```python {class="line-numbers linkable-line-numbers"}
+detections = await detector.get_detections_from_camera(camera_name)
+for d in detections:
+    if d.confidence > 0.6 and d.class_name == "PERSON":
+        arm.stop()
+```
+
+You can also use inferences of computer vision for quality assurance purposes.
+For example, you can program a robot arm doing automated harvesting to use vision to identify ripe produce and pick crops selectively.
+
+The code for this would look like:
+
+```python {class="line-numbers linkable-line-numbers"}
+classifications = await detector.get_classifications_from_camera(
+    camera_name,
+    4)
+for c in classifications:
+    if c.confidence > 0.6 and c.class_name == "RIPE":
+        arm.pick()
+```
+
+To get inferences programmatically, you will want to use the vision service API:
+
+{{< cards >}}
+{{% card link="/dev/reference/apis/services/vision/" customTitle="Vision service API" noimage="True" %}}
+{{< /cards >}}
+
+To implement industrial solutions in code, you can also explore the following component APIs:
+
+{{< cards >}}
+{{< card link="/dev/reference/apis/components/arm/" customTitle="Arm API" noimage="True" >}}
+{{< card link="/dev/reference/apis/components/base/" customTitle="Base API" noimage="True" >}}
+{{< card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="True" >}}
+{{< card link="/dev/reference/apis/components/gripper/" customTitle="Gripper API" noimage="True" >}}
+{{< card link="/dev/reference/apis/components/motor/" customTitle="Motor API" noimage="True" >}}
+{{< card link="/dev/reference/apis/components/sensor/" customTitle="Sensor API" noimage="True" >}}
+{{< /cards >}}
diff --git a/docs/data-ai/ai/advanced/_index.md
b/docs/data-ai/ai/advanced/_index.md new file mode 100644 index 0000000000..f004c032cf --- /dev/null +++ b/docs/data-ai/ai/advanced/_index.md @@ -0,0 +1,8 @@ +--- +linkTitle: "Advanced" +title: "Advanced" +weight: 200 +layout: "empty" +type: "docs" +empty_node: true +--- diff --git a/docs/how-tos/upload-data.md b/docs/data-ai/ai/advanced/upload-external-data.md similarity index 82% rename from docs/how-tos/upload-data.md rename to docs/data-ai/ai/advanced/upload-external-data.md index 6bfd61d612..1546e81158 100644 --- a/docs/how-tos/upload-data.md +++ b/docs/data-ai/ai/advanced/upload-external-data.md @@ -1,35 +1,27 @@ --- -title: "Upload a batch of data" -linkTitle: "Upload a batch of data" -description: "Upload data to the Viam app from your local computer or mobile device using the data client API, Viam CLI, or Viam mobile app." +linkTitle: "Upload external data" +title: "Upload external data for training" +images: ["/services/icons/data-folder.svg"] +weight: 20 +layout: "docs" type: "docs" -tags: ["data management", "cloud", "sync"] -icon: true languages: ["python"] viamresources: ["data_manager"] -platformarea: ["data"] -level: "Beginner" -date: "2024-09-05" -# updated: "" # When the tutorial was last entirely checked -cost: "0" -images: ["/services/icons/data-folder.svg"] aliases: - /data/upload/ - /services/data/upload/ + - /how-tos/upload-data/ +date: "2024-12-04" +description: "Upload data to the Viam app from your local computer or mobile device using the data client API, Viam CLI, or Viam mobile app." +prev: "/data-ai/ai/act/" --- -If you configured the [data management service](/services/data/), Viam automatically uploads data from the configured directory to the cloud, at the interval you specified. 
-However, if you want to upload a batch of data once from somewhere else, either from a different directory on your machine or from your personal computer or mobile device, you have several options using the Viam app, the data client API, or the Viam mobile app. +When you configure the data management service, Viam automatically uploads data from the default directory `~/.viam/capture` and any directory you configured. +If you want to upload a batch of data to train ML models on from an external source you can also: -{{% alert title="In this page" color="tip" %}} - -- [Sync a batch of data from another directory](#sync-a-batch-of-data-from-another-directory) by configuring the path to the directory as an additional sync path in a machine's data management service. - This requires the data to be on a machine running `viam-server`. -- [Upload data with the Python SDK](#upload-data-with-python) by running a Python script to upload files from a folder. - You can do this on a computer that doesn't have `viam-server` installed on it. -- [Upload images with the Viam mobile app](#upload-images-with-the-viam-mobile-app) from your mobile device. - -{{% /alert %}} +- [Sync a batch of data from another directory](#sync-a-batch-of-data-from-another-directory) +- [Upload data with Python](#upload-data-with-python) +- [Upload images from your phone with the Viam mobile app](#upload-images-with-the-viam-mobile-app) ## Sync a batch of data from another directory @@ -46,7 +38,7 @@ However, if you already have a cache of data you'd like to use with Viam, you ca {{< expand "Enable data capture and sync on your machine." >}} -Add the [data management service](/services/data/): +Add the [data management service](/data-ai/capture-data/capture-sync/#configure-the-data-management-service): On your machine's **CONFIGURE** tab, click the **+** icon next to your machine part in the left-hand menu and select **Service**. 
@@ -104,7 +96,7 @@ You can also turn off **Syncing** unless you have other directories you'd like t ## Upload data with Python -You can use the Python data client API [`file_upload_from_path`](/appendix/apis/data-client/#fileuploadfrompath) method to upload one or more files from your computer to the Viam Cloud. +You can use the Python data client API [`file_upload_from_path`](/dev/reference/apis/data-client/#fileuploadfrompath) method to upload one or more files from your computer to the Viam Cloud. {{% alert title="Note" color="note" %}} @@ -130,13 +122,13 @@ pip install viam-sdk ### Instructions {{< table >}} -{{% tablestep link="/appendix/apis/data-client/#establish-a-connection" %}} +{{% tablestep link="/dev/reference/apis/data-client/#establish-a-connection" %}} **1. Get API key** Go to your organization's setting page and create an API key for your individual {{< glossary_tooltip term_id="part" text="machine part" >}}, {{< glossary_tooltip term_id="part" text="machine" >}}, {{< glossary_tooltip term_id="location" text="location" >}}, or {{< glossary_tooltip term_id="organization" text="organization" >}}. {{% /tablestep %}} -{{% tablestep link="/appendix/apis/data-client/" %}} +{{% tablestep link="/dev/reference/apis/data-client/" %}} **2. Add a `file_upload_from_path` API call** Create a Python script and use the `file_upload_from_path` method to upload your data, depending on whether you are uploading one or multiple files: @@ -144,7 +136,7 @@ Create a Python script and use the `file_upload_from_path` method to upload your {{< tabs >}} {{< tab name="Upload a single file" >}} -To upload just one file, make a call to [`file_upload_from_path`](/appendix/apis/data-client/#fileuploadfrompath). +To upload just one file, make a call to [`file_upload_from_path`](/dev/reference/apis/data-client/#fileuploadfrompath). 
{{< expand "Click this to see example code" >}} @@ -194,7 +186,7 @@ if __name__ == "__main__": {{% /tab %}} {{< tab name="Upload all files in a directory" >}} -To upload all the files in a directory, you can use the [`file_upload_from_path`](/appendix/apis/data-client/#fileuploadfrompath) method inside a `for` loop. +To upload all the files in a directory, you can use the [`file_upload_from_path`](/dev/reference/apis/data-client/#fileuploadfrompath) method inside a `for` loop. {{< expand "Click this to see example code" >}} @@ -260,7 +252,7 @@ View your uploaded data in your [**DATA** page in the Viam app](https://app.viam ## Upload images with the Viam mobile app -Upload images as machine data straight from your phone, skipping the normal data capture and cloud synchronization process, through the [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app). +Upload images as machine data straight from your phone, skipping the normal data capture and cloud synchronization process, through the [Viam mobile app](/manage/troubleshoot/teleoperate/default-interface/#viam-mobile-app). This is useful if you want to capture images for training machine learning models on the go. ### Prerequisites @@ -282,7 +274,7 @@ Install the mobile app from the [App Store](https://apps.apple.com/vn/app/viam-r ### Instructions {{< table >}} -{{% tablestep link="/services/data/" %}} +{{% tablestep link="/data-ai/capture-data/capture-sync/#configure-the-data-management-service" %}} **1. Navigate to your machine** In the Viam mobile app, select an organization by clicking on the menu icon in the top left corner and tapping an organization. @@ -306,10 +298,5 @@ However, the uploaded images will not be associated with a component or method. ## Next steps -Now that you have a batch of data uploaded, you can train an ML model on it. -Or, if you want to collect and upload data _not_ in a batch, see Capture and Sync Image Data. 
- -{{< cards >}} -{{% card link="/how-tos/train-deploy-ml/" %}} -{{% card link="/how-tos/image-data/" %}} -{{< /cards >}} +Now that you have a batch of data uploaded, you can [train an ML model](/data-ai/ai/train-tflite/) on it. +Or, if you want to collect and upload data _not_ in a batch, see [Create a dataset](/data-ai/ai/create-dataset/). diff --git a/docs/data-ai/ai/alert.md b/docs/data-ai/ai/alert.md new file mode 100644 index 0000000000..261b90135f --- /dev/null +++ b/docs/data-ai/ai/alert.md @@ -0,0 +1,146 @@ +--- +linkTitle: "Alert on inferences" +title: "Alert on inferences" +weight: 60 +layout: "docs" +type: "docs" +description: "Use triggers to send email notifications when inferences are made." +--- + +At this point, you should have already set up and tested [computer vision functionality](/data-ai/ai/run-inference/). +On this page, you'll learn how to use triggers to send alerts in the form of email notifications or webhook requests when certain detections or classifications are made. + +You will build a system that can monitor camera feeds and detect situations that require review. +In other words, this system performs anomaly detection. +Whenever the system detects an anomaly, it will send an email notification. + +First, you'll set up data capture and sync to record images with the anomaly and upload them to the cloud. +Next, you'll configure a trigger to send email notifications or webhook requests when the anomaly is detected. + +### Prerequisites + +{{% expand "A running machine connected to the Viam app. Click to see instructions." %}} + +{{% snippet "setup-both.md" %}} + +{{% /expand%}} + +{{< expand "A configured camera and vision service. Click to see instructions." >}} + +Follow the instructions to [configure a camera](/operate/reference/components/camera/) and [run inference](/data-ai/ai/run-inference/). + +{{< /expand >}} + +## Configure a filtered camera + +Your physical camera is working and your vision service is set up. 
+Now you will pull them together to filter out only images where an inference is made with the [`filtered-camera`](https://app.viam.com/module/erh/filtered-camera) {{< glossary_tooltip term_id="module" text="module" >}}.
+This camera module takes the vision service and applies it to your webcam feed, filtering the output so that later, when you configure data management, you can save only the images that contain people inferred to match the filtering criteria rather than all images the camera captures.
+
+Configure the camera module with classification or object labels according to the labels your ML model provides that you want to alert on.
+Follow the instructions in the [`filtered-camera` module readme](https://github.com/erh/filtered_camera).
+For example, if using the YOLOv8 model (named `yolo`) for hardhat detection, you would configure the module like the following:
+
+{{% expand "Instructions for configuring the filtered-camera module to detect people without a hardhat" %}}
+
+1. Navigate to your machine's **CONFIGURE** tab.
+
+2. Click the **+** (Create) button next to your main part in the left-hand menu and select **Component**.
+   Start typing `filtered-camera` and select **camera / filtered-camera** from the results.
+   Click **Add module**.
+
+3. Name your filtering camera something like `objectfilter-cam` and click **Create**.
+
+4. Paste the following into the attributes field:
+
+   ```json {class="line-numbers linkable-line-numbers"}
+   {
+     "camera": "my_webcam",
+     "vision": "yolo",
+     "window_seconds": 3,
+     "objects": {
+       "NO-Hardhat": 0.5
+     }
+   }
+   ```
+
+   If you named your detector something other than "yolo," edit the `vision` value accordingly.
+   You can also edit the confidence threshold.
+   If you change it to `0.6` for example, the `filtered-camera` camera will only return labeled bounding boxes when the vision model indicates at least 60% confidence that the object is a hard hat or a person without a hard hat.
+
+5.
Click **Save** in the top right corner of the screen to save your changes. + +{{% /expand%}} + +## Configure data capture and sync + +Viam's built-in [data management service](/data-ai/capture-data/capture-sync/#configure-the-data-management-service) allows you to, among other things, capture images and sync them to the cloud. + +Configure data capture on the `filtered-camera` camera to capture images of detections or classifications: + +1. First, you need to add the data management service to your machine to make it available to capture data on your camera. + + Navigate to your machine's **CONFIGURE** tab. + + Click the **+** (Create) button next to your main part in the left-hand menu and select **Service**. + Type "data" and click **data management / RDK**. + Name your data management service `data-manager` and click **Create**. + + Leave all the default data service attributes as they are and click **Save** in the top right corner of the screen to save your changes. + +2. Now you're ready to enable data capture on your detector camera. + Locate the `objectfilter-cam` panel. + +3. Click **Add method**. + Click the **Type** dropdown and select **ReadImage**. + Set the capture frequency to `0.2` images per second (equivalent to one image every 5 seconds). + You can always change the frequency to suit your use case. + Set the **MIME type** to `image/jpeg`. + +## Set up alerts + +[Triggers](/data-ai/data/advanced/alert-data/) allow you to send webhook requests or email notifications when certain events happen. + +You can use the **Data has been synced to the cloud** (`part_data_ingested`) trigger to send alerts whenever an image with an anomaly detection is synced to the cloud from your object filter camera. + +Set up the trigger with a webhook or with Viam's built-in email alerts which sends a generic email letting you know that data has been synced. 
+ +### Configure a trigger on your machine + +Now it's time to configure a trigger so that you get an email when a person is not wearing a hard hat. + +Go to the **CONFIGURE** tab of your machine on the [Viam app](https://app.viam.com). +Click the **+** (Create) button in the left side menu and select **Trigger**. + +Name the trigger and click **Create**. + +Select trigger **Type** as **Data has been synced to the cloud** and **Data Types** as **Binary (image)**. + +{{}} + +To configure notifications, either + +- add a webhook and enter the URL of your custom cloud function +- add an email address to use Viam's built-in email notifications + +For both options also configure the time between notifications. + +Click **Save** in the top right corner of the screen to save your changes. + +{{< readfile "/static/include/webhooks.md" >}} + +## Test the whole system + +You've built all the pieces of the system and connected them together. +Now it's time to test the whole thing. + +Make sure `viam-server` is running on your machine. +Run your camera in front of what you're detecting and wait for an anomaly to appear. +Wait a couple of minutes for the email to arrive in your inbox. +Congratulations, you've successfully built your anomaly detection monitor! + +## Troubleshooting + +### Test the vision service + +To see the detections or classifications occurring in real time and verify if their confidence level reaches the threshold you have set, you can navigate to the vision service card and expand the **TEST** panel. diff --git a/docs/data-ai/ai/create-dataset.md b/docs/data-ai/ai/create-dataset.md new file mode 100644 index 0000000000..a9fa4b5e22 --- /dev/null +++ b/docs/data-ai/ai/create-dataset.md @@ -0,0 +1,339 @@ +--- +linkTitle: "Create a dataset" +title: "Create a dataset" +weight: 10 +layout: "docs" +type: "docs" +description: "Create a dataset to train a machine learning model." 
+aliases: + - /fleet/dataset/ + - /manage/data/label/ + - /manage/data/dataset/ + - /data/dataset/ +--- + +To ensure a machine learning model you create performs well, you need to train it on a variety of images that cover the range of things your machine should be able to recognize. + +This page will walk you through labeling images for machine learning and creating a dataset with them. + +{{% expand "Just testing and want a dataset to get started with? Click here." %}} + +We have two datasets you can use for testing, one with shapes and the other with a wooden figure: + +{{}} + +{{< imgproc src="/tutorials/filtered-camera-module/viam-figure-dataset.png" style="width:400px" alt="The datasets subtab of the data tab in the Viam app, showing a custom 'viam-figure' dataset of 25 images, most containing the wooden Viam figure" class="imgzoom fill aligncenter" resize="1400x" >}} + +1. [Download the shapes dataset](https://storage.googleapis.com/docs-blog/dataset-shapes.zip) or [download the wooden figure dataset](https://storage.googleapis.com/docs-blog/dataset-figure.zip). +1. Unzip the download. +1. Open a terminal and go to the dataset folder. +1. Create a python script in the dataset's folder with the following contents: + + ```python {class="line-numbers linkable-line-numbers"} + # Assumption: The dataset was exported using the `viam dataset export` command. + # This script is being run from the `destination` directory. 
+ + import asyncio + import os + import json + import argparse + + from viam.rpc.dial import DialOptions, Credentials + from viam.app.viam_client import ViamClient + from viam.proto.app.data import BinaryID + + async def connect(args) -> ViamClient: + dial_options = DialOptions( + credentials=Credentials( + type="api-key", + payload=args.api_key, + ), + auth_entity=args.api_key_id + ) + return await ViamClient.create_from_dial_options(dial_options) + + + async def main(): + parser = argparse.ArgumentParser( + description='Upload images, metadata, and tags to a new dataset') + parser.add_argument('-org-id', dest='org_id', action='store', + required=True, help='Org Id') + parser.add_argument('-api-key', dest='api_key', action='store', + required=True, help='API KEY with org admin access') + parser.add_argument('-api-key-id', dest='api_key_id', action='store', + required=True, help='API KEY ID with org admin access') + parser.add_argument('-machine-part-id', dest='machine_part_id', + action='store', required=True, + help='Machine part id for image metadata') + parser.add_argument('-location-id', dest='location_id', action='store', + required=True, help='Location id for image metadata') + parser.add_argument('-dataset-name', dest='dataset_name', action='store', + required=True, + help='Name of the data to create and upload to') + args = parser.parse_args() + + + # Make a ViamClient + viam_client = await connect(args) + # Instantiate a DataClient to run data client API methods on + data_client = viam_client.data_client + + # Create dataset + try: + dataset_id = await data_client.create_dataset( + name=args.dataset_name, + organization_id=args.org_id + ) + print("Created dataset: " + dataset_id) + except Exception: + print("Error. 
Check that the dataset name does not already exist.") + print("See: https://app.viam.com/data/datasets") + return 1 + + file_ids = [] + + for file_name in os.listdir("metadata/"): + with open("metadata/" + file_name) as f: + data = json.load(f) + tags = None + if "tags" in data["captureMetadata"].keys(): + tags = data["captureMetadata"]["tags"] + + annotations = None + if "annotations" in data.keys(): + annotations = data["annotations"] + + image_file = data["fileName"] + + print("Uploading: " + image_file) + + id = await data_client.file_upload_from_path( + part_id=args.machine_part_id, + tags=tags, + filepath=os.path.join("data/", image_file) + ) + print("FileID: " + id) + + binary_id = BinaryID( + file_id=id, + organization_id=args.org_id, + location_id=args.location_id + ) + + if annotations: + bboxes = annotations["bboxes"] + for box in bboxes: + await data_client.add_bounding_box_to_image_by_id( + binary_id=binary_id, + label=box["label"], + x_min_normalized=box["xMinNormalized"], + y_min_normalized=box["yMinNormalized"], + x_max_normalized=box["xMaxNormalized"], + y_max_normalized=box["yMaxNormalized"] + ) + + file_ids.append(binary_id) + + await data_client.add_binary_data_to_dataset_by_ids( + binary_ids=file_ids, + dataset_id=dataset_id + ) + print("Added files to dataset.") + print("https://app.viam.com/data/datasets?id=" + dataset_id) + + viam_client.close() + + if __name__ == '__main__': + asyncio.run(main()) + ``` + +1. Run the script to upload the images and their metadata into a dataset in Viam app providing the following input: + + ```sh {class="command-line" data-prompt="$" } + python upload_data.py -org-id -api-key \ + -api-key-id -machine-part-id \ + -location-id -dataset-name + ``` + +1. Continue to [Train a tflite machine learning model](/data-ai/ai/train-tflite/). + +{{% /expand%}} + +## Prerequisites + +{{< expand "At least 10 captured images of what you want your machine to recognize." 
>}} + +[Capture and sync image data](/data-ai/capture-data/capture-sync/) using the data management service. + +When training machine learning models, it is important to supply a variety of images. +The dataset you create should represent the possible range of visual input. +This may include capturing images of different angles, different configurations of objects and different lighting conditions. +The more varied the provided dataset, the more accurate the resulting model becomes. + +{{< /expand >}} + +## Tips on improving model accuracy + +- **More data means better models:** Incorporate as much data as you practically can to improve your model’s overall performance. +- **Include counterexamples:** Include images with and without the object you’re looking to classify. + This helps the model distinguish the target object from the background and reduces the chances of false positives by teaching it what the object is not. +- **Avoid class imbalance:** Don’t train excessively on one specific type or class, make sure each category has a roughly equal number of images. + For instance, if you're training a dog detector, include images of various dog breeds to avoid bias towards one breed. + An imbalanced dataset can lead the model to favor one class over others, reducing its overall accuracy. +- **Match your training images to your intended use case:** Use images that reflect the quality and conditions of your production environment. + For example, if you plan to use a low-quality camera in production, train with low-quality images. + Similarly, if your model will run all day, capture images in both daylight and nighttime conditions. +- **Vary your angles and distances:** Include image examples from every angle and distance that the model will see in normal use. +- **Ensure labeling accuracy:** Make sure the labels or bounding box annotations you give are accurate. 
+ +## Label your images + +Once you have enough images, you can disable data capture to [avoid incurring fees](https://www.viam.com/product/pricing) for capturing large amounts of training data. + +Then use the interface on the [**DATA** tab](https://app.viam.com/data/view) to label your images. + +Most use cases fall into one of two categories: + +- Detecting certain objects and their location within an image. + For example, you may wish to know where and how many `pizzas` there are in an image. + In this case, add a label for each object you would like to detect. + +{{< expand "For instructions to add labels, click here." >}} +To add a label, click on an image and select the **Bounding box** mode in the menu that opens. +Choose an existing label or create a new label. +Click on the image where you would like to add the bounding box and drag to where the bounding box should end. + +{{}} + +To expand the image, click on the expand side menu arrow in the corner of the image: + +{{}} + +Repeat this with all images. + +You can add one or more bounding boxes for objects in each image. +{{< /expand >}} + +- Classifying an image as a whole. + In other words, determining a descriptive state about an image. + For example, you may wish to know whether an image of a food display is `full`, `empty`, or `average` or whether the quality of manufacturing output is `good` or `bad`. + In this case, add tags to describe your images. + +{{< expand "For instructions to add tags, click here." >}} +To tag an image, click on an image and select the **Image tags** mode in the menu that opens. +Add one or more tags to your image. + +{{}} + +If you want to expand the image, click on the expand side menu arrow in the corner of the image. + +Repeat this with all images. +{{< /expand >}} + +## Organize data into a dataset + +To train a model, your images must be in a dataset. + +Use the interface on the **DATA** tab to add your labeled images to a dataset. 
+ +Also add any unlabelled images to your dataset. +Unlabelled images must not comprise more than 20% of your dataset. +If you have 25 images in your dataset, at least 20 of those must be labelled. + +{{}} + +{{< expand "Want to add images to a dataset programmatically? Click here." >}} + +You can also add all images with a certain label to a dataset using the [`viam dataset data add` command](/dev/tools/cli/#dataset) or the [Data Client API](/dev/reference/apis/data-client/#addtagstobinarydatabyfilter): + +{{< tabs >}} +{{% tab name="CLI" %}} + +```sh {class="command-line" data-prompt="$"} +viam dataset create --org-id= --name= +viam dataset data add filter --dataset-id= --tags=red_star,blue_square +``` + +{{% /tab %}} +{{< tab name="Data Client API" >}} + +You can run this script to add all images from your machine to a dataset: + +```python {class="line-numbers linkable-line-numbers" data-line="14,18,30" } +import asyncio + +from viam.rpc.dial import DialOptions, Credentials +from viam.app.viam_client import ViamClient +from viam.utils import create_filter +from viam.proto.app.data import BinaryID + + +async def connect() -> ViamClient: + dial_options = DialOptions( + credentials=Credentials( + type="api-key", + # Replace "" (including brackets) with your machine's API key + payload='', + ), + # Replace "" (including brackets) with your machine's + # API key ID + auth_entity='' + ) + return await ViamClient.create_from_dial_options(dial_options) + + +async def main(): + # Make a ViamClient + viam_client = await connect() + # Instantiate a DataClient to run data client API methods on + data_client = viam_client.data_client + + # Replace "" (including brackets) with your machine's part id + my_filter = create_filter(part_id="") + + print("Getting data for part...") + binary_metadata, _, _ = await data_client.binary_data_by_filter( + my_filter, + include_binary_data=False + ) + my_binary_ids = [] + + for obj in binary_metadata: + my_binary_ids.append( + BinaryID( + 
+                file_id=obj.metadata.id,
+                organization_id=obj.metadata.capture_metadata.organization_id,
+                location_id=obj.metadata.capture_metadata.location_id
+            )
+        )
+    print("Creating dataset...")
+    # Create dataset
+    try:
+        dataset_id = await data_client.create_dataset(
+            name="MyDataset",
+            # Replace "" (including brackets) with your organization id
+            organization_id=""
+        )
+        print("Created dataset: " + dataset_id)
+    except Exception:
+        print("Error. Check that the dataset name does not already exist.")
+        print("See: https://app.viam.com/data/datasets")
+        return 1
+
+    print("Adding data to dataset...")
+    await data_client.add_binary_data_to_dataset_by_ids(
+        binary_ids=my_binary_ids,
+        dataset_id=dataset_id
+    )
+    print("Added files to dataset.")
+    print("See dataset: https://app.viam.com/data/datasets?id=" + dataset_id)
+
+    viam_client.close()
+
+if __name__ == '__main__':
+    asyncio.run(main())
+```
+
+{{% /tab %}}
+{{< /tabs >}}
+
+{{% /expand%}}
diff --git a/docs/data-ai/ai/deploy.md b/docs/data-ai/ai/deploy.md
new file mode 100644
index 0000000000..632caf0898
--- /dev/null
+++ b/docs/data-ai/ai/deploy.md
@@ -0,0 +1,70 @@
+---
+linkTitle: "Deploy model"
+title: "Deploy a model"
+weight: 40
+layout: "docs"
+type: "docs"
+modulescript: true
+description: "The Machine Learning (ML) model service allows you to deploy machine learning models to your machine."
+aliases:
+  - /how-tos/train-deploy-ml/
+  - /services/ml/
+---
+
+The Machine Learning (ML) model service allows you to deploy [machine learning models](/data-ai/ai/deploy/#deploy-your-ml-model) to your machine.
+The service works with models trained inside and outside the Viam app:
+
+- You can [train TFlite](/data-ai/ai/train-tflite/) or [other models](/data-ai/ai/train/) on data from your machines.
+- You can upload externally trained models on the [**MODELS** tab](https://app.viam.com/data/models) in the **DATA** section of the Viam app.
+- You can use [ML models](https://app.viam.com/registry?type=ML+Model) from the [Viam Registry](https://app.viam.com/registry).
+- You can use a [model](/data-ai/ai/deploy/#deploy-your-ml-model) trained outside the Viam platform whose files are on your machine. + +## Deploy your ML model + +Navigate to the **CONFIGURE** tab of one of your machine in the [Viam app](https://app.viam.com). +Add an ML model service that supports the ML model you trained or the one you want to use from the registry. + +{{}} + +### Model framework support + +Viam currently supports the following frameworks: + + +| Model Framework | ML Model Service | Hardware Support | Description | +| --------------- | --------------- | ------------------- | ----------- | +| [TensorFlow Lite](https://www.tensorflow.org/lite) | [`tflite_cpu`](https://github.com/viam-modules/mlmodel-tflite) | linux/amd64, linux/arm64, darwin/arm64, darwin/amd64 | Quantized version of TensorFlow that has reduced compatibility for models but supports more hardware. Uploaded models must adhere to the [model requirements.](https://github.com/viam-modules/mlmodel-tflite) | +| [ONNX](https://onnx.ai/) | [`onnx-cpu`](https://github.com/viam-labs/onnx-cpu), [`triton`](https://github.com/viamrobotics/viam-mlmodelservice-triton) | Nvidia GPU, linux/amd64, linux/arm64, darwin/arm64 | Universal format that is not optimized for hardware inference but runs on a wide variety of machines. | +| [TensorFlow](https://www.tensorflow.org/) | [`tensorflow-cpu`](https://github.com/viam-modules/tensorflow-cpu), [`triton`](https://github.com/viamrobotics/viam-mlmodelservice-triton) | Nvidia GPU, linux/amd64, linux/arm64, darwin/arm64 | A full framework that is made for more production-ready systems. | +| [PyTorch](https://pytorch.org/) | [`torch-cpu`](https://github.com/viam-modules/torch), [`triton`](https://github.com/viamrobotics/viam-mlmodelservice-triton) | Nvidia GPU, linux/arm64, darwin/arm64 | A full framework that was built primarily for research. 
Because of this, it is much faster to do iterative development with (model doesn’t have to be predefined) but it is not as “production ready” as TensorFlow. It is the most common framework for OSS models because it is the go-to framework for ML researchers. | + +{{< alert title="Note" color="note" >}} +For some models of the ML model service, like the [Triton ML model service](https://github.com/viamrobotics/viam-mlmodelservice-triton/) for Jetson boards, you can configure the service to use either the available CPU or a dedicated GPU. +{{< /alert >}} + +For example, use the `ML model / TFLite CPU` service for TFlite ML models. +If you used the built-in training, this is the ML model service you need to use. +If you used a custom training script, you may need a different ML model service. + +To deploy a model, click **Select model** and select the model from your organization or the registry. +Save your config. + +### Machine learning models from registry + +You can search the machine learning models that are available to deploy on this service from the registry here: + +{{}} + +## Next steps + +On its own the ML model service only runs the model. +After deploying your model, you need to configure an additional service to use the deployed model. +For example, you can configure an [`mlmodel` vision service](/operate/reference/services/vision/) to visualize the inferences your model makes. +Follow our docs to [run inference](/data-ai/ai/run-inference/) to add an `mlmodel` vision service and see inferences. + +For other use cases, consider [creating custom functionality with a module](/operate/get-started/other-hardware/). + +{{< alert title="Add support for other models" color="tip" >}} +ML models must be designed in particular shapes to work with the `mlmodel` [classification](/operate/reference/services/vision/mlmodel/) or [detection](/operate/reference/services/vision/mlmodel/) model of Viam's [vision service](/operate/reference/services/vision/). 
+See [ML Model Design](/registry/advanced/mlmodel-design/) to design a modular ML model service with models that work with vision. +{{< /alert >}} diff --git a/docs/data-ai/ai/run-inference.md b/docs/data-ai/ai/run-inference.md new file mode 100644 index 0000000000..b3631f7a24 --- /dev/null +++ b/docs/data-ai/ai/run-inference.md @@ -0,0 +1,106 @@ +--- +linkTitle: "Run inference" +title: "Run inference on a model" +weight: 50 +layout: "docs" +type: "docs" +modulescript: true +aliases: + - /how-tos/detect-people/ + - /how-tos/detect-color/ +description: "Run inference on a model with a vision service or an SDK." +--- + +After deploying an ml model, you need to configure an additional service to use the inferences the deployed model makes. +You can run inference on an ML model with a vision service or use an SDK to further process inferences. + +## Use a vision service + +Vision services work to provide computer vision. +They use an ML model and apply it to the stream of images from your camera. + +{{}} + +{{< readfile "/static/include/create-your-own-mr.md" >}} + +Note that many of these services have built in ML models, and thus do not need to be run alongside an ML model service. + +One vision service you can use to run inference on a camera stream if you have an ML model service configured is the `mlmodel` service. + +### Configure an mlmodel vision service + +Add the `vision / ML model` service to your machine. +Then, from the **Select model** dropdown, select the name of the ML model service you configured when [deploying](/data-ai/ai/deploy/) your model (for example, `mlmodel-1`). + +**Save** your changes. + +### Test your changes + +You can test a deployed vision service by clicking on the **Test** area of its configuration panel or from the [**CONTROL** tab](/manage/troubleshoot/teleoperate/default-interface/#viam-app). + +The camera stream shows when the vision service identifies something. +Try pointing the camera at a scene similar to your training data. 
+ +{{< imgproc src="/tutorials/data-management/blue-star.png" alt="Detected blue star" resize="x200" >}} +{{< imgproc src="/tutorials/filtered-camera-module/viam-figure-preview.png" alt="Detection of a viam figure with a confidence score of 0.97" resize="x200" >}} + +{{% expand "Want to limit the number of shown classifications or detections? Click here." %}} + +If you are seeing a lot of classifications or detections, you can set a minimum confidence threshold. + +Start by setting the value to 0.8. +This reduces your output by filtering out anything below a threshold of 80% confidence. +You can adjust this attribute as necessary. + +Click the **Save** button in the top right corner of the page to save your configuration, then close and reopen the **Test** panel of the vision service configuration panel. +Now if you reopen the panel, you will only see classifications or detections with a confidence value higher than the `default_minimum_confidence` attribute. + +{{< /expand>}} + +For more detailed information, including optional attribute configuration, see the [`mlmodel` docs](/operate/reference/services/vision/mlmodel/). + +## Use an SDK + +You can also run inference using a Viam SDK. +You can use the [`Infer`](/dev/reference/apis/services/ml/#infer) +method of the ML Model API to make inferences. 
+ +For example: + +{{< tabs >}} +{{% tab name="Python" %}} + +```python {class="line-numbers linkable-line-numbers"} +import numpy as np + +my_mlmodel = MLModelClient.from_robot(robot=machine, name="my_mlmodel_service") + +image_data = np.zeros((1, 384, 384, 3), dtype=np.uint8) + +# Create the input tensors dictionary +input_tensors = { + "image": image_data +} + +output_tensors = await my_mlmodel.infer(input_tensors) +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go {class="line-numbers linkable-line-numbers"} +input_tensors := ml.Tensors{"0": tensor.New(tensor.WithShape(1, 2, 3), tensor.WithBacking([]int{1, 2, 3, 4, 5, 6}))} + +output_tensors, err := myMLModel.Infer(context.Background(), input_tensors) +``` + +{{% /tab %}} +{{< /tabs >}} + +After adding a vision service, you can use a vision service API method with a classifier or a detector to get inferences programmatically. +For more information, see the ML Model and Vision APIs: + +{{< cards >}} +{{< card link="/dev/reference/apis/services/ml/" customTitle="ML Model API" noimage="True" >}} +{{% card link="/dev/reference/apis/services/vision/" customTitle="Vision service API" noimage="True" %}} +{{< /cards >}} diff --git a/docs/data-ai/ai/train-tflite.md b/docs/data-ai/ai/train-tflite.md new file mode 100644 index 0000000000..bc05de95df --- /dev/null +++ b/docs/data-ai/ai/train-tflite.md @@ -0,0 +1,186 @@ +--- +linkTitle: "Train TFlite model" +title: "Train a TFlite model" +weight: 20 +type: "docs" +tags: ["vision", "data", "services"] +images: ["/services/ml/train.svg"] +description: "Use your image data to train a model, so your machines can make inferences about their environments." 
+aliases: + - /use-cases/deploy-ml/ + - /manage/ml/train-model/ + - /ml/train-model/ + - /services/ml/train-model/ + - /tutorials/data-management-tutorial/ + - /tutorials/data-management/ + - /data-management/data-management-tutorial/ + - /tutorials/services/data-management-tutorial/ + - /tutorials/services/data-mlmodel-tutorial/ + - /tutorials/projects/filtered-camera/ + - /how-tos/deploy-ml/ + - /how-tos/train-deploy-ml/ +languages: [] +viamresources: ["data_manager", "mlmodel", "vision"] +platformarea: ["ml"] +date: "2024-12-03" +--- + +Many machines have cameras through which they can monitor their environment. +With machine learning, you can train models on patterns within that visual data. +You can collect data from the camera stream and label any patterns within the images. + +If a camera is pointed at a food display, for example, you can label the image of the display with `full` or `empty`, or label items such as individual `pizza_slice`s. + +Using a model trained on such images, machines can make inferences about their environments. +Your machines can then automatically trigger alerts or perform other actions. +If a food display is empty, the machine could, for example, alert a supervisor to restock the display. + +Common use cases for this are **quality assurance** and **health and safety** applications. + +Follow this guide to use your image data to train an ML model, so that your machine can make inferences about its environment. + +## Prerequisites + +{{% expand "A running machine connected to the Viam app. Click to see instructions." %}} + +{{% snippet "setup.md" %}} + +{{% /expand%}} + +{{% expand "A dataset with labels. Click to see instructions." %}} + +Follow the guide to [create a dataset](/data-ai/ai/create-dataset/) if you haven't already. + +{{% /expand%}} + +{{% expand "A configured camera. Click to see instructions." 
%}} + +First, connect the camera to your machine's computer if it's not already connected (like with an inbuilt laptop webcam). + +Then, navigate to the **CONFIGURE** tab of your machine's page in the [Viam app](https://app.viam.com). +Click the **+** icon next to your machine part in the left-hand menu and select **Component**. +The `webcam` model supports most USB cameras and inbuilt laptop webcams. +You can find additional camera models in the [camera configuration](/operate/reference/components/camera/#configuration) documentation. + +Complete the camera configuration and use the **TEST** panel in the configuration card to test that the camera is working. + +{{% /expand%}} + +{{% expand "No computer or webcam?" %}} + +No problem. +You don't need to buy or own any hardware to complete this guide. + +Use [Try Viam](https://app.viam.com/try) to borrow a rover free of cost online. +The rover already has `viam-server` installed and is configured with some components, including a webcam. + +Once you have borrowed a rover, go to its **CONTROL** tab where you can view camera streams and also drive the rover. +You should have a front-facing camera and an overhead view of your rover. +Now you know what the rover can perceive. + +To change what the front-facing camera is pointed at, find the **cam** camera panel on the **CONTROL** tab and click **Toggle picture-in-picture** so you can continue to view the camera stream. +Then, find the **viam_base** panel and drive the rover around. + +Now that you have seen that the cameras on your Try Viam rover work, begin by [Creating a dataset and labeling data](/data-ai/ai/create-dataset/). +You can drive the rover around as you capture data to get a variety of images from different angles. + +{{< alert title="Tip" color="tip" >}} +Be aware that if you are running out of time during your rental, you can extend your rover rental as long as there are no other reservations. 
+{{< /alert >}} + +{{% /expand%}} + +## Train a machine learning (ML) model + +Now that you have a dataset with your labeled images, you are ready to train a machine learning model. + +{{< table >}} +{{% tablestep %}} +**1. Train an ML model** + +In the Viam app, navigate to your list of [**DATASETS**](https://app.viam.com/data/datasets) and select the one you want to train on. + +Click **Train model** and follow the prompts. + +You can train a TFLite model using **Built-in training**. + +Click **Next steps**. + +{{}} + +{{% /tablestep %}} +{{% tablestep %}} +**2. Fill in the details for your ML model** + +Enter a name for your new model. + +Select a **Task Type**: + +- **Single Label Classification**: The resulting model predicts one of the selected labels or `UNKNOWN` per image. + Select this if you only have one label on each image. Ensure that the dataset you are training on also contains unlabeled images. +- **Multi Label Classification**: The resulting model predicts one or more of the selected labels per image. +- **Object Detection**: The resulting model predicts either no detected objects or any number of object labels alongside their locations per image. + +Select the labels you want to train your model on from the **Labels** section. Unselected labels will be ignored, and will not be part of the resulting model. + +Click **Train model**. + +{{< imgproc src="/tutorials/data-management/train-model.png" alt="The data tab showing the train a model pane" style="width:500px" resize="1200x" class="imgzoom fill aligncenter" >}} + +{{% /tablestep %}} +{{% tablestep %}} +**3. Wait for your model to train** + +The model now starts training and you can follow its process on the [**TRAINING** tab](https://app.viam.com/training). + +Once the model has finished training, it becomes visible on the [**MODELS** tab](https://app.viam.com/data/models). + +You will receive an email when your model finishes training. + +{{% /tablestep %}} +{{% tablestep %}} +**4. 
Debug your training job** + +From the [**TRAINING** tab](https://app.viam.com/training), click on your training job's ID to see its logs. + +{{< alert title="Note" color="note" >}} + +Your training script may output logs at the error level but still succeed. + +{{< /alert >}} + +You can also view your training jobs' logs with the [`viam train logs`](/dev/tools/cli/#train) command. + +{{% /tablestep %}} +{{< /table >}} + +## Test your ML model + +{{}} + +Once your model has finished training, you can test it. + +Ideally, you want your ML model to be able to work with a high level of confidence. +As you test it, if you notice faulty predictions or confidence scores, you will need to adjust your dataset and retrain your model. + +If you trained a classification model, you can test it with the following instructions. +If you trained a detection model, move on to [deploy an ML model](/data-ai/ai/deploy/). + +1. Navigate to the [**DATA** tab](https://app.viam.com/data/view) and click on the **Images** subtab. +1. Click on an image to open the side menu, and select the **Actions** tab. +1. In the **Run model** section, select your model and specify a confidence threshold. +1. Click **Run model**. + +If the results exceed the confidence threshold, the **Run model** section shows a label and the corresponding confidence score. + +## Next steps + +Now your machine can make inferences about its environment. The next step is to [act](/data-ai/ai/act/) or [alert](/data-ai/ai/alert/) based on these inferences. 
+ +See the following tutorials for examples of using machine learning models to make your machine do things based on its inferences about its environment: + +{{< cards >}} +{{% card link="/tutorials/projects/helmet/" %}} +{{% card link="/tutorials/services/color-detection-scuttle/" %}} +{{% card link="/tutorials/projects/pet-treat-dispenser/" customTitle="Smart Pet Feeder" %}} +{{< /cards >}} diff --git a/docs/how-tos/create-custom-training-scripts.md b/docs/data-ai/ai/train.md similarity index 91% rename from docs/how-tos/create-custom-training-scripts.md rename to docs/data-ai/ai/train.md index 1b7eeebff1..2a26480edf 100644 --- a/docs/how-tos/create-custom-training-scripts.md +++ b/docs/data-ai/ai/train.md @@ -1,43 +1,33 @@ --- -title: "Train models with any machine learning frameworks" -linkTitle: "Train models with custom specs" -weight: 90 -type: "docs" +linkTitle: "Train other models" +title: "Train other models" tags: ["data management", "ml", "model training"] -description: "If you want to train models to custom specifications, write a custom training script and upload it to the Viam Registry." -# SME: Tahiya S. -images: ["/services/icons/ml.svg"] -icon: true +weight: 30 +layout: "docs" +type: "docs" aliases: - /services/ml/upload-training-script/ + - /how-tos/create-custom-training-scripts/ languages: ["python"] viamresources: ["mlmodel", "data_manager"] platformarea: ["ml"] -level: "Advanced" -date: "2024-08-29" -updated: "2024-10-20" # When the tutorial was last entirely checked -cost: "0" +description: "If you want to train models to custom specifications, write a custom training script and upload it to the Viam Registry." +date: "2024-12-04" --- You can create custom Python training scripts that train ML models to your specifications using PyTorch, Tensorflow, TFLite, ONNX, or any other Machine Learning framework. 
Once you upload a training script to the [Viam Registry](https://app.viam.com/registry?type=Training+Script), you can use it to build ML models in the Viam Cloud based on your datasets. -{{< alert title="In this page" color="tip" >}} - -1. [Create a training script](#create-a-training-script) from a template. -1. [Test your training script locally](#test-your-training-script-locally) with a downloaded dataset. -1. [Upload your training script](#upload-your-training-script). -1. [Submit a training job](#submit-a-training-job) that uses the training script on a dataset to train a new ML model. - -{{< /alert >}} +You can also use training scripts that are in the registry already. +If you wish to do this, skip to [Submit a training job](#submit-a-training-job). ## Prerequisites {{% expand "A dataset with data you can train an ML model on. Click to see instructions." %}} -For image data, you can follow the instructions to [Create a dataset and label data](/how-tos/train-deploy-ml/#create-a-dataset-and-label-data) to create a dataset. +For image data, you can follow the instructions to [Create a dataset](/data-ai/ai/create-dataset/) to create a dataset and label data. -For other data you can use the [Data Client API](/appendix/apis/data-client/) from within the training script to get data stored in the Viam Cloud. +For other data you can use the [Data Client API](/dev/reference/apis/data-client/) from within the training script to get data stored in the Viam Cloud. {{% /expand%}} @@ -424,7 +414,7 @@ Update the main to call the functions you have just created. {{% tablestep %}} **9. Using Viam APIs in a training script** -If you need to access any of the [Viam APIs](/appendix/apis/) within a custom training script, you can use the environment variables `API_KEY` and `API_KEY_ID` to establish a connection. 
+If you need to access any of the [Viam APIs](/dev/reference/apis/) within a custom training script, you can use the environment variables `API_KEY` and `API_KEY_ID` to establish a connection. These environment variables will be available to training scripts. ```python @@ -452,7 +442,7 @@ You can export one of your Viam datasets to test your training script locally. {{% tablestep %}} **1. Export your dataset** -You can get the dataset ID from the dataset page or using the [`viam dataset list`](/cli/#dataset) command: +You can get the dataset ID from the dataset page or using the [`viam dataset list`](/dev/tools/cli/#dataset) command: ```sh {class="command-line" data-prompt="$"} viam dataset export --destination= --dataset-id= --include-jsonl=true @@ -524,7 +514,7 @@ viam training-script upload --path=my-training.tar.gz \ {{% /tab %}} {{< /tabs >}} -You can also [specify the version, framework, type, visibility, and description](/cli/#training-script) when uploading a custom training script. +You can also [specify the version, framework, type, visibility, and description](/dev/tools/cli/#training-script) when uploading a custom training script. To find your organization's ID, run the following command: @@ -540,7 +530,7 @@ You can view uploaded training scripts by navigating to the [registry's **Traini ## Submit a training job -After uploading the training script, you can run it by submitting a training job through the Viam app or using the Viam CLI or [ML Training client API](/appendix/apis/ml-training-client/#submittrainingjob). +After uploading the training script, you can run it by submitting a training job through the Viam app or using the Viam CLI or [ML Training client API](/dev/reference/apis/ml-training-client/#submittrainingjob). 
{{< table >}} {{% tablestep %}} @@ -558,7 +548,7 @@ Click **Train model** and select **Train on a custom training script**, then fol {{% /tab %}} {{% tab name="CLI" %}} -You can use [`viam train submit custom from-registry`](/cli/#positional-arguments-submit) to submit a training job. +You can use [`viam train submit custom from-registry`](/dev/tools/cli/#positional-arguments-submit) to submit a training job. For example: @@ -572,7 +562,7 @@ viam train submit custom from-registry --dataset-id= \ This command submits a training job to the previously uploaded `MyCustomTrainingScript` with another input dataset, which trains `MyRegistryModel` and publishes that to the registry. -You can get the dataset id from the dataset page or using the [`viam dataset list`](/cli/#dataset) command. +You can get the dataset id from the dataset page or using the [`viam dataset list`](/dev/tools/cli/#dataset) command. {{% /tab %}} {{< /tabs >}} @@ -605,20 +595,10 @@ Your training script may output logs at the error level but still succeed. {{< /alert >}} -You can also view your training jobs' logs with the [`viam train logs`](/cli/#train) command. +You can also view your training jobs' logs with the [`viam train logs`](/dev/tools/cli/#train) command. {{% /tablestep %}} {{< /table >}} -## Next steps - -To use your new model with machines, you must deploy it with the [ML model service](/services/ml/). -Then you can use another service, such as the vision service, to apply the deployed model to camera feeds. - -To see models in use with machines, see one of the following resources: - -{{< cards >}} -{{% card link="/how-tos/detect-people/" %}} -{{% card link="/tutorials/projects/helmet/" %}} -{{% card link="/tutorials/projects/integrating-viam-with-openai/" %}} -{{< /cards >}} +To use your new model with machines, you must [deploy it](/data-ai/ai/deploy/) with the appropriate ML model service. 
+Then you can use another service, such as the vision service, to [run inference](/data-ai/ai/run-inference/). diff --git a/docs/data-ai/capture-data/_index.md b/docs/data-ai/capture-data/_index.md new file mode 100644 index 0000000000..58db67e0cc --- /dev/null +++ b/docs/data-ai/capture-data/_index.md @@ -0,0 +1,10 @@ +--- +linkTitle: "Capture data" +title: "Capture data" +weight: 100 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +header_only: true +--- diff --git a/docs/data-ai/capture-data/advanced/_index.md b/docs/data-ai/capture-data/advanced/_index.md new file mode 100644 index 0000000000..f004c032cf --- /dev/null +++ b/docs/data-ai/capture-data/advanced/_index.md @@ -0,0 +1,8 @@ +--- +linkTitle: "Advanced" +title: "Advanced" +weight: 200 +layout: "empty" +type: "docs" +empty_node: true +--- diff --git a/docs/data-ai/capture-data/advanced/how-sync-works.md b/docs/data-ai/capture-data/advanced/how-sync-works.md new file mode 100644 index 0000000000..b5b877b76e --- /dev/null +++ b/docs/data-ai/capture-data/advanced/how-sync-works.md @@ -0,0 +1,80 @@ +--- +linkTitle: "How sync works" +title: "How sync works" +tags: ["data management", "data", "services"] +weight: 12 +layout: "docs" +type: "docs" +platformarea: ["data"] +description: "Data sync works differently for viam-server and viam-micro-server." +date: "2024-12-18" +prev: "/data-ai/capture-data/conditional-sync/" +--- + +Data capture and cloud sync works differently for `viam-server` and `viam-micro-server`. + +{{< tabs >}} +{{% tab name="viam-server" %}} + +The data is captured locally on the machine's storage and, by default, stored in the `~/.viam/capture` directory. + +If a machine restarts for any reason, capture automatically resumes and any data already stored but not yet synced is synced. + +The service can capture data from multiple resources at the same or different frequencies. +The service does not impose a lower or upper limit on the frequency of data collection. 
+However, in practice, your hardware may impose limits on the frequency of data collection. +Avoid configuring data capture to higher rates than your hardware can handle, as this could lead to performance degradation. + +Data capture is frequently used with cloud sync. +You can start and stop capture and sync independently. +You can also enable cloud sync without data capture and it will sync data in either the sync directory or in the additional sync paths configured in the `viam-server` config. +If a file in either the data capture directory or any of the sync paths ends with .capture it will be treated as a file with data that was captured by data capture. +If it ends in .prog it will be ignored as this extension is used by data capture to denote that a file is a capture file that's currently being written to. +If a file is in the capture directory or one of its descendants or any of the additional sync paths or their descendants and doesn't have .prog nor .capture extensions, it will be treated as an arbitrary file and, like .capture files, synced if data sync is enabled and the robot is able to connect to [the Viam app](https://app.viam.com). + +{{% /tab %}} +{{% tab name="viam-micro-server" %}} + +The data is captured in the ESP32's flash memory until it is uploaded to the Viam Cloud. + +If the machine restarts before all data is synced, all unsynced data captured since the last sync point is lost. + +The service can capture data from multiple resources at the same or different frequencies. +The service does not impose a lower or upper limit on the frequency of data collection. +However, in practice, high frequency data collection (> 100Hz) requires special considerations on the ESP32. + +{{% /tab %}} +{{< /tabs >}} + +## Security + +The data management service uses {{< glossary_tooltip term_id="grpc" text="gRPC" >}} calls to send and receive data, so your data is encrypted while in flight. 
+When data is stored in the cloud, it is encrypted at rest by the cloud storage provider. + +## Data integrity + +Viam's data management service is designed to safeguard against data loss, data duplication and otherwise compromised data. + +If the internet becomes unavailable or the machine needs to restart during the sync process, the sync is interrupted. +If the sync process is interrupted, the service will retry uploading the data at exponentially increasing intervals until the interval in between tries is at one hour, at which point the service retries the sync every hour. +When the connection is restored and sync resumes, the service continues sync where it left off without duplicating data. +If the interruption happens mid-file, sync resumes from the beginning of that file. + +To avoid syncing files that are still being written to, the data management service only syncs arbitrary files that haven't been modified in the previous 10 seconds. +This default can be changed with the [`file_last_modified_millis` config attribute](/data-ai/capture-data/capture-sync/#configure-the-data-management-service). + +## Storage + +Data that is successfully synced to the cloud is automatically deleted from local storage. + +When a machine loses its internet connection, it cannot resume cloud sync until it can reach the Viam Cloud again. + +{{}} + +To ensure that the machine can store all data captured while it has no connection, you need to provide enough local data storage. + +If your robot is offline and can't sync and your machine's disk fills up beyond a certain threshold, the data management service will delete captured data to free up additional space and maintain a working machine. +For more information, see [Automatic data deletion details](/data-ai/capture-data/capture-sync/#click-for-more-automatic-data-deletion-details). + +Data capture supports capturing tabular data directly to MongoDB in addition to capturing to disk. 
+For more information, see [Capture directly to MongoDB](/data-ai/capture-data/capture-sync/#capture-directly-to-mongodb). diff --git a/docs/data-ai/capture-data/capture-other-sources.md b/docs/data-ai/capture-data/capture-other-sources.md new file mode 100644 index 0000000000..3f8ef32e65 --- /dev/null +++ b/docs/data-ai/capture-data/capture-other-sources.md @@ -0,0 +1,12 @@ +--- +linkTitle: "Capture other data sources" +title: "Capture and sync other data sources" +tags: ["data management", "data", "services"] +weight: 12 +layout: "docs" +type: "docs" +platformarea: ["data"] +description: "TODO" +date: "2024-12-17" +draft: true +--- diff --git a/docs/services/data/_index.md b/docs/data-ai/capture-data/capture-sync.md similarity index 71% rename from docs/services/data/_index.md rename to docs/data-ai/capture-data/capture-sync.md index ea76f012ef..3c9429752c 100644 --- a/docs/services/data/_index.md +++ b/docs/data-ai/capture-data/capture-sync.md @@ -1,13 +1,13 @@ --- -title: "Data Management Service" -linkTitle: "Data Management" -description: "Configure the data management service to capture data from your components and services and sync it to the cloud." +linkTitle: "Capture edge data" +title: "Capture and sync edge data" +tags: ["data management", "data", "services"] weight: 10 +layout: "docs" type: "docs" -tags: ["data management", "cloud", "sync", "capture"] -icon: true -images: ["/services/icons/data-capture.svg"] -no_list: true +platformarea: ["data"] +description: "Capture data from a resource on your machine and sync the data to the cloud." 
+date: "2024-12-03" aliases: - /services/data/capture/ - /data/capture/ @@ -16,190 +16,41 @@ aliases: - /services/data/cloud-sync/ - /data/cloud-sync/ - /services/data/capture-sync/ -no_service: true -date: "2022-01-01" -# updated: "" # When the content was last entirely checked + - /how-tos/sensor-data/ + - /services/data/ + - fleet/data-management/ + - /manage/data-management/ + - /services/data-management/ + - /manage/data/ + - "/data-management/" + - "/data-management/" + - "/services/data/" + - "/data/" + - /manage/data/export/ + - /data/export/ + - /services/data/export/ + - /manage/data/view/ + - /data/view/ + - /services/data/view/ --- -The data management service captures data from one or more {{< glossary_tooltip term_id="resource" text="resources" >}} locally, and syncs it to cloud storage when a connection to the cloud is available. -You can configure which data you want to capture, as well as the capture rate and the sync frequency. +You can use data management service to capture and sync data from your machine to the cloud. +You can capture data from [supported components and services](#supported-resources) or from arbitrary folders on your machines. -{{< tabs >}} -{{% tab name="viam-server" %}} - -The data is captured locally on the machine's storage and, by default, stored in the `~/.viam/capture` directory. - -If a machine restarts for any reason, capture automatically resumes and any data from already stored but not yet synced is synced. - -The service can capture data from multiple resources at the same or different frequencies. -The service does not impose a lower or upper limit on the frequency of data collection. -However, in practice, your hardware may impose limits on the frequency of data collection. -Avoid configuring data capture to higher rates than your hardware can handle, as this could lead to performance degradation. - -Data capture is frequently used with cloud sync. 
-However, if you want to manage your machine's captured data yourself, you can enable only data capture without cloud sync. - -{{% /tab %}} -{{% tab name="viam-micro-server" %}} - -The data is captured in the ESP32's flash memory until it is uploaded to the Viam Cloud. - -If the machine restarts before all data is synced, all unsynced data captured since the last sync point is lost. - -The service can capture data from multiple resources at the same or different frequencies. -The service does not impose a lower or upper limit on the frequency of data collection. -However, in practice, high frequency data collection (> 100Hz) requires special considerations on the ESP32. - -{{% /tab %}} -{{< /tabs >}} - -{{< expand "Click for an example." >}} -Consider a tomato picking robot with a 3D camera and an arm. -When you configure the robot, you might set the camera to capture point cloud data at a frequency of 30Hz. -For the arm, you might capture joint positions at 1Hz. - -If your requirements change and you want to capture data from both components at 10Hz, you can change the capture rate at any time in each component's data capture configuration. -{{< /expand >}} - -{{< alert title="In this page" color="note" >}} -{{% toc %}} -{{< /alert >}} - -## Security - -The data management service uses {{< glossary_tooltip term_id="grpc" text="gRPC" >}} calls to send and receive data, so your data is encrypted while in flight. -When data is stored in the cloud, it is encrypted at rest by the cloud storage provider. - -## Data Integrity - -Viam's data management service is designed to safeguard against data loss, data duplication and otherwise compromised data. - -If the internet becomes unavailable or the machine needs to restart during the sync process, the sync is interrupted. 
-If the sync process is interrupted, the service will retry uploading the data at exponentially increasing intervals until the interval in between tries is at one hour, at which point the service retries the sync every hour. -When the connection is restored and sync resumes, the service continues sync where it left off without duplicating data. -If the interruption happens mid-file, sync resumes from the beginning of that file. - -To avoid syncing files that are still being written to, the data management service only syncs files that haven't been modified in the previous 10 seconds. - -## Storage - -Data that is successfully synced to the cloud is automatically deleted from local storage. - -When a machine loses its internet connection, it cannot resume cloud sync until it can reach the Viam Cloud again. - -{{}} - -To ensure that the machine can store all data captured while it has no connection, you need to provide enough local data storage. - -{{< alert title="Warning" color="warning" >}} - -If your machine's disk fills up beyond a certain threshold, the data management service will delete captured data to free up additional space and maintain a working machine. - -{{< /alert >}} - -{{< expand "Automatic data deletion details" >}} - -If cloud sync is enabled, the data management service deletes captured data once it has successfully synced to the cloud. - -With `viam-server`, the data management service will also automatically delete local data in the event your machine's local storage fills up. 
-Local data is automatically deleted when _all_ of the following conditions are met: - -- Data capture is enabled on the data management service -- Local disk usage percentage is greater than or equal to 90% -- The Viam capture directory is at least 50% of the current local disk usage - -If local disk usage is greater than or equal to 90%, but the Viam capture directory is not at least 50% of that usage, a warning log message will be emitted instead and no action will be taken. - -Automatic file deletion only applies to files in the specified Viam capture directory, which is set to `~/.viam/capture` by default. -Data outside of this directory is not touched by automatic data deletion. - -If your machine captures a large amount of data, or frequently goes offline for long periods of time while capturing data, consider moving the Viam capture directory to a larger, dedicated storage device on your machine if available. -You can change the capture directory using the `capture_dir` attribute. - -You can also control how local data is deleted if your machine's local storage becomes full, using the `delete_every_nth_when_disk_full` attribute. - -{{< /expand >}} - -{{< expand "Capture directly to MongoDB" >}} - -Data capture supports capturing tabular data directly to MongoDB in addition to capturing to disk. - -This feature is intended to support use cases like offline dashboards which don't require strong data delivery or consistency guarantees. 
- -Here is a sample configuration that will capture fake sensor readings both to the configured MongoDB URI as well as to the `~/.viam/capture` directory on disk: - -```json -{ - "components": [ - { - "name": "sensor-1", - "namespace": "rdk", - "type": "sensor", - "model": "fake", - "attributes": {}, - "service_configs": [ - { - "type": "data_manager", - "attributes": { - "capture_methods": [ - { - "method": "Readings", - "capture_frequency_hz": 0.5, - "additional_params": {} - } - ] - } - } - ] - } - ], - "services": [ - { - "name": "data_manager-1", - "namespace": "rdk", - "type": "data_manager", - "attributes": { - "mongo_capture_config": { - "uri": "mongodb://127.0.0.1:27017/?directConnection=true&serverSelectionTimeoutMS=2000" - } - } - } - ] -} -``` - -When `mongo_capture_config.uri` is configured, data capture will attempt to connect to the configured MongoDB server and write captured tabular data to the configured `mongo_capture_config.database` and `mongo_capture_config.collection` (or their defaults if unconfigured) after enqueuing that data to be written to disk. - -If writes to MongoDB fail for any reason, data capture will log an error for each failed write and continue capturing. - -Failing to write to MongoDB doesn't affect capturing and syncing data to cloud storage other than adding capture latency. - -{{< alert title="Caution" color="caution" >}} - -- Capturing directly to MongoDB may write data to MongoDB that later fails to be written to disk (and therefore never gets synced to cloud storage). -- Capturing directly to MongoDB does not retry failed writes to MongoDB. As a consequence, it is NOT guaranteed all data captured will be written to MongoDB. - This can happen in cases such as MongoDB being inaccessible to `viam-server` or writes timing out. -- Capturing directly to MongoDB may reduce the maximum frequency that data capture can capture data due to the added latency of writing to MongoDB. 
- If your use case needs to support very high capture rates, this feature may not be appropriate. - -{{< /alert >}} - -{{< /expand >}} - -## Configuration - -To capture data from one or more machines, you must first [configure the data management service](#data-management-service-configuration). -Then [configure data management](#resource-data-capture-configuration) on each {{< glossary_tooltip term_id="resource" text="resource" >}} that you want to capture data from. +## Configure the data management service -### Data management service configuration +To start, configure the data management service to capture and sync data. {{< tabs >}} {{% tab name="Config Builder" %}} From your machine's **CONFIGURE** tab in the [Viam app](https://app.viam.com), add the `data management` service. -On the panel that appears, configure data capture and sync attributes as applicable, then save your config. +On the panel that appears, configure data capture and sync attributes as applicable. +To both capture data and sync it to the cloud, keep both **Capturing** and **Syncing** switched on. -![Data capture configuration](/tutorials/data-management/data-management-conf.png) +Click the **Save** button in the top right corner of the page to save your config. + +{{< imgproc src="/tutorials/data-management/data-management-conf.png" alt="Data capture configuration card." resize="600x" >}} {{% /tab %}} {{% tab name="JSON Example" %}} @@ -270,28 +121,91 @@ The following attributes are available for the data management service: | `additional_sync_paths` | string array | Optional | Paths to any other directories on your machine from which you want to sync data to the cloud. Once data is synced from a directory, it is automatically deleted from your machine. |

| | `sync_interval_mins` | float | Optional | Time interval in minutes between syncing to the cloud. Viam does not impose a minimum or maximum on the frequency of data syncing. However, in practice, your hardware or network speed may impose limits on the frequency of data syncing.
Default: `0.1`, meaning once every 6 seconds. |

| | `delete_data_on_part_deletion` | bool | Optional | Whether deleting this {{< glossary_tooltip term_id="machine" text="machine" >}} or {{< glossary_tooltip term_id="part" text="machine part" >}} should result in deleting all the data captured by that machine part.
Default: `false` |

| -| `delete_every_nth_when_disk_full` | int | Optional | How many files to delete when local storage meets the [fullness criteria](/services/data/#storage). The data management service will delete every Nth file that has been captured upon reaching this threshold. Use JSON mode to configure this attribute.
Default: `5`, meaning that every fifth captured file will be deleted. |

| +| `delete_every_nth_when_disk_full` | int | Optional | How many files to delete when local storage meets the [fullness criteria](/data-ai/capture-data/advanced/how-sync-works/#storage). The data management service will delete every Nth file that has been captured upon reaching this threshold. Use JSON mode to configure this attribute.
Default: `5`, meaning that every fifth captured file will be deleted. |

| | `maximum_num_sync_threads` | int | Optional | Max number of CPU threads to use for syncing data to the Viam Cloud.
Default: [runtime.NumCPU](https://pkg.go.dev/runtime#NumCPU)/2 so half the number of logical CPUs available to viam-server |

| -| `mongo_capture_config.uri` | string | Optional | The [MongoDB URI](https://www.mongodb.com/docs/v6.2/reference/connection-string/) data capture will attempt to write tabular data to after it is enqueued to be written to disk. When non-empty, data capture will capture tabular data to the configured MongoDB database and collection at that URI.
See `mongo_capture_config.database` and `mongo_capture_config.collection` below for database and collection defaults.
See [Data capture directly to MongoDB](/services/data/#capture-directly-to-mongodb) for an example config.|

| +| `mongo_capture_config.uri` | string | Optional | The [MongoDB URI](https://www.mongodb.com/docs/v6.2/reference/connection-string/) data capture will attempt to write tabular data to after it is enqueued to be written to disk. When non-empty, data capture will capture tabular data to the configured MongoDB database and collection at that URI.
See `mongo_capture_config.database` and `mongo_capture_config.collection` below for database and collection defaults.
See [Data capture directly to MongoDB](/data-ai/capture-data/advanced/how-sync-works/#storage) for an example config.|

| | `mongo_capture_config.database` | string | Optional | When `mongo_capture_config.uri` is non empty, changes the database data capture will write tabular data to.
Default: `"sensorData"` |

| | `mongo_capture_config.collection` | string | Optional | When `mongo_capture_config.uri` is non empty, changes the collection data capture will write tabular data to.
Default: `"readings"` |

| | `cache_size_kb` | float | Optional | `viam-micro-server` only. The maximum amount of storage bytes (in kilobytes) allocated to a data collector.
Default: `1` KB. |

| +| `file_last_modified_millis` | float | Optional | The amount of time to pass since arbitrary files were last modified until they are synced. Normal .capture files are synced as soon as they are able to be synced.
Default: `10000` milliseconds. |

| + +### Capture directly to MongoDB + +Data capture supports capturing tabular data directly to MongoDB in addition to capturing to disk. + +This feature is intended to support use cases like offline dashboards which don't require strong data delivery or consistency guarantees. + +Here is a sample configuration that will capture fake sensor readings both to the configured MongoDB URI as well as to the `~/.viam/capture` directory on disk: -### Resource data capture configuration +```json +{ + "components": [ + { + "name": "sensor-1", + "namespace": "rdk", + "type": "sensor", + "model": "fake", + "attributes": {}, + "service_configs": [ + { + "type": "data_manager", + "attributes": { + "capture_methods": [ + { + "method": "Readings", + "capture_frequency_hz": 0.5, + "additional_params": {} + } + ] + } + } + ] + } + ], + "services": [ + { + "name": "data_manager-1", + "namespace": "rdk", + "type": "data_manager", + "attributes": { + "mongo_capture_config": { + "uri": "mongodb://127.0.0.1:27017/?directConnection=true&serverSelectionTimeoutMS=2000" + } + } + } + ] +} +``` + +When `mongo_capture_config.uri` is configured, data capture will attempt to connect to the configured MongoDB server and write captured tabular data to the configured `mongo_capture_config.database` and `mongo_capture_config.collection` (or their defaults if unconfigured) after enqueuing that data to be written to disk. + +If writes to MongoDB fail for any reason, data capture will log an error for each failed write and continue capturing. + +Failing to write to MongoDB doesn't affect capturing and syncing data to cloud storage other than adding capture latency. + +{{< alert title="Caution" color="caution" >}} + +- Capturing directly to MongoDB may write data to MongoDB that later fails to be written to disk (and therefore never gets synced to cloud storage). +- Capturing directly to MongoDB does not retry failed writes to MongoDB. 
As a consequence, it is NOT guaranteed all data captured will be written to MongoDB. + This can happen in cases such as MongoDB being inaccessible to `viam-server` or writes timing out. +- Capturing directly to MongoDB may reduce the maximum frequency that data capture can capture data due to the added latency of writing to MongoDB. + If your use case needs to support very high capture rates, this feature may not be appropriate. + +{{< /alert >}} + +## Configure data capture You can capture data for any {{< glossary_tooltip term_id="resource" text="resource" >}} that supports it, including resources on {{< glossary_tooltip term_id="remote-part" text="remote parts" >}}. +Scroll to the resource card you wish to configure data capture and sync on. {{< tabs >}} - {{% tab name="Regular" %}} -Once you have added the data capture service, you can specify the data you want to capture at a resource level. - {{< tabs >}} {{% tab name="Config builder" %}} For each resource you can capture data for, there is a **Data capture** section in its panel. -Select a **Method** and specify a capture **Frequency** in hertz. +Select a **Method** and specify a capture **Frequency** in hertz, for example to `0.1` to capture an image every 10 seconds. You can add multiple methods with different capture frequencies. Some methods will prompt you to add additional parameters. @@ -551,7 +465,7 @@ To add them to your JSON configuration you must explicitly add the remote resour {{< expand "Click to view example JSON configuration for an ESP32 board" >}} -The following example shows the configuration of the remote part, in this case an [ESP32 board](/components/board/esp32/). +The following example shows the configuration of the remote part, in this case an [ESP32 board](/operate/reference/components/board/esp32/). This config is just like that of a non-remote part; the remote connection is established by the main part (in the next expandable example). 
```json {class="line-numbers linkable-line-numbers"} @@ -736,88 +650,74 @@ The following attributes are available for data capture configuration: | Name | Type | Required? | Description | | ------------------ | ------ | --------- | ----------- | | `capture_frequency_hz` | float | **Required** | Frequency in hertz at which to capture data. For example, to capture a reading every 2 seconds, enter `0.5`. | -| `method` | string | **Required** | Depends on the type of component or service. See [Supported components and services](/services/data/#supported-components-and-services). | +| `method` | string | **Required** | Depends on the type of component or service. See [Supported components and services](/data-ai/capture-data/capture-sync/#supported-resources). | | `retention_policy` | object | Optional | Option to configure how long data collected by this component or service should remain stored in the Viam Cloud. You must set this in JSON mode. See the JSON example for a camera component.
**Options:** `"days": `, `"binary_limit_gb": `, `"tabular_limit_gb": `.
Days are in UTC time. Setting a retention policy of 1 day means that data stored now will be deleted the following day **in UTC time**. You can set either or both of the size limit options and size is in gigabytes. | | `additional_params` | depends | depends | Varies based on the method. For example, `ReadImage` requires a MIME type. | -### Supported components and services +Click the **Save** button in the top right corner of the page to save your config. -The following components and services support data capture, for the following methods: +If cloud sync is enabled, the data management service deletes captured data once it has successfully synced to the cloud. -{{< tabs >}} -{{% tab name="viam-server" %}} +{{< alert title="Warning" color="warning" >}} - -| Type | Method | -| ----------------------------------------------- | ------ | -| [Arm](/components/arm/) | `EndPosition`, `JointPositions` | -| [Board](/components/board/) | `Analogs`, `Gpios` | -| [Camera](/components/camera/) | `GetImages`, `ReadImage`, `NextPointCloud` | -| [Encoder](/components/encoder/) | `TicksCount` | -| [Gantry](/components/gantry/) | `Lengths`, `Position` | -| [Motor](/components/motor/) | `Position`, `IsPowered` | -| [Movement sensor](/components/movement-sensor/) | `AngularVelocity`, `CompassHeading`, `LinearAcceleration`, `LinearVelocity`, `Orientation`, `Position` | -| [Sensor](/components/sensor/) | `Readings` | -| [Servo](/components/servo/) | `Position` | -| [Vision service](/services/vision/) | `CaptureAllFromCamera` | +If your robot is offline and can't sync and your machine's disk fills up beyond a certain threshold, the data management service will delete captured data to free up additional space and maintain a working machine. 
-{{% /tab %}} -{{% tab name="viam-micro-server" %}} +{{< /alert >}} - -| Type | Method | -| ---- | ------ | -| [Movement Sensor](/components/movement-sensor/) | [`AngularVelocity`](/appendix/apis/components/movement-sensor/#getangularvelocity), [`LinearAcceleration`](/appendix/apis/components/movement-sensor/#getlinearacceleration), [`LinearVelocity`](/appendix/apis/components/movement-sensor/#getlinearvelocity) | -| [Sensor](/components/sensor/) | [`GetReadings`](/appendix/apis/components/sensor/#getreadings) | +{{< expand "Click for more automatic data deletion details" >}} -{{% /tab %}} -{{< /tabs >}} +With `viam-server`, the data management service will also automatically delete local data in the event your machine's local storage fills up. +Local data is automatically deleted when _all_ of the following conditions are met: -## View captured data +- Data capture is enabled on the data management service +- Local disk usage percentage is greater than or equal to 90% +- The Viam capture directory is at least 50% of the current local disk usage -To view all the captured data you have access to, go to the [**DATA** tab](https://app.viam.com/data/view) where you can filter by location, type of data, and more. +If local disk usage is greater than or equal to 90%, but the Viam capture directory is not at least 50% of that usage, a warning log message will be emitted instead and no action will be taken. -You can also access data from a resource, machine part, or machine menu. +Automatic file deletion only applies to files in the specified Viam capture directory, which is set to `~/.viam/capture` by default. +Data outside of this directory is not touched by automatic data deletion. -## Considerations +If your machine captures a large amount of data, or frequently goes offline for long periods of time while capturing data, consider moving the Viam capture directory to a larger, dedicated storage device on your machine if available. 
+You can change the capture directory using the `capture_dir` attribute. -- **Capturing too much data**: You can [use filtering to collect and sync only certain images](/how-tos/image-data/#use-filtering-to-collect-and-sync-only-certain-images) to capture data selectively. -- **Rentention policy**: Set a `retention_policy` attribute in your [data capture configuration](#resource-data-capture-configuration) to avoid keeping data stored in the Viam Cloud longer than a specified number of days. -- **Pausing sync**: You can pause cloud sync at any time by navigating to your machine's **CONFIGURE** tab and disabling **Syncing** for your [data management service](../). +You can also control how local data is deleted if your machine's local storage becomes full, using the `delete_every_nth_when_disk_full` attribute. - If you have captured data that you do not want to sync, delete the data on the machine before resuming cloud sync. - To delete the data locally, `ssh` into your machine and delete the data in the directory where you capture data. +{{< /expand >}} -- **Sync data conditionally**: You can use a {{< glossary_tooltip term_id="module" text="module" >}} to sync data only when a certain logic condition is met, instead of at a regular time interval. - For example, if you rely on mobile data but have intermittent WiFi connection in certain locations or at certain times of the day, you may want to trigger sync to only occur when these conditions are met. - To set up triggers for syncing see [Conditional cloud sync](/how-tos/conditional-sync/). +## Stop data capture -## API +If this is a test project, make sure you stop data capture to avoid charges for a large amount of unwanted data. -The [data management service API](/appendix/apis/services/data/) supports the following methods: +In the **Data capture** section of your resource's configuration card, toggle the switch to **Off**. 
-{{< readfile "/static/include/services/apis/generated/data_manager-table.md" >}} +Click the **Save** button in the top right corner of the page to save your config. -The data client API supports a separate set of methods that allow you to upload and export data to and from the Viam app. -For information about that API, see [Data Client API](/appendix/apis/data-client/). +## View captured data -## Troubleshooting +To view all the captured data you have access to, go to the [**DATA** tab](https://app.viam.com/data/view) where you can filter by location, type of data, and more. -### Images are dim on start up +You can also access data from a resource or machine part menu. -If you are capturing camera data, it can happen that the camera captures and syncs miscolored or dark images upon start up. -Wait for a few seconds and you should see correctly colored images. +## Supported resources -## Next steps +The following components and services support data capture and cloud sync: + +{{< readfile "/static/include/data/capture-supported.md" >}} + +## Considerations -If you have synced data, such as [sensor](/components/sensor/) readings, you can [query that data with SQL or MQL](/how-tos/sensor-data-query-with-third-party-tools/) from the Viam app or a MQL-compatible client. -If you have synced images, you can use those images to [train machine learning models](/how-tos/train-deploy-ml/) within the Viam app. +- **Capturing too much data**: You can [use filtering to collect and sync only certain images](/data-ai/capture-data/filter-before-sync/) to capture data selectively. +- **Rentention policy**: Set a `retention_policy` attribute in your [data capture configuration](#configure-data-capture) to avoid keeping data stored in the Viam Cloud longer than a specified number of days. +- **Pausing sync**: You can pause cloud sync at any time by navigating to your machine's **CONFIGURE** tab and disabling **Syncing** for your data management service. 
-Or check out the following guides and tutorials: + If you have captured data that you do not want to sync, delete the data on the machine before resuming cloud sync. + To delete the data locally, `ssh` into your machine and delete the data in the directory where you capture data. + +- **Sync data conditionally**: You can use a {{< glossary_tooltip term_id="module" text="module" >}} to sync data only when a certain logic condition is met, instead of at a regular time interval. + For example, if you rely on mobile data but have intermittent WiFi connection in certain locations or at certain times of the day, you may want to trigger sync to only occur when these conditions are met. + To set up triggers for syncing see [Conditional cloud sync](/data-ai/capture-data/conditional-sync/). + +## Next steps -{{< cards >}} -{{% card link="/how-tos/image-data/" %}} -{{% card link="/how-tos/train-deploy-ml/" %}} -{{% card link="/how-tos/performance-metrics/" %}} -{{% card link="/tutorials/control/air-quality-fleet/" %}} -{{< /cards >}} +Now that you have captured data, you could [create a dataset](/data-ai/ai/create-dataset/) and use this data to [train your own Machine Learning model](/data-ai/ai/train-tflite/) with the Viam platform. diff --git a/docs/how-tos/conditional-sync.md b/docs/data-ai/capture-data/conditional-sync.md similarity index 90% rename from docs/how-tos/conditional-sync.md rename to docs/data-ai/capture-data/conditional-sync.md index 8216279723..b5a7600ec8 100644 --- a/docs/how-tos/conditional-sync.md +++ b/docs/data-ai/capture-data/conditional-sync.md @@ -1,8 +1,9 @@ --- title: "Conditional cloud sync" -linkTitle: "Conditional data sync" +linkTitle: "Conditional sync" description: "Trigger cloud sync to sync captured data when custom conditions are met." 
type: "docs" +weight: 20 tags: ["data management", "cloud", "sync"] images: ["/services/icons/data-cloud-sync.svg"] icon: true @@ -13,10 +14,8 @@ aliases: languages: [] viamresources: ["sensor", "data_manager"] platformarea: ["data", "registry"] -level: "Intermediate" -date: "2024-09-02" -# updated: "" # When the tutorial was last entirely checked -cost: "0" +next: /data-ai/capture-data/advanced/how-sync-works/ +date: "2024-12-04" --- You may want to sync data only when a certain logic condition is met, instead of at a regular time interval. @@ -25,15 +24,9 @@ Or, you may want to trigger sync only when your machine detects an object of a c You can use the [trigger-sync-examples module](https://github.com/viam-labs/trigger-sync-examples-v2) if one of these examples is what you are looking for. If you need different logic, you can create a modular sensor that determines if the conditions for sync are met or not. -This guide will show you the implementation of a sensor which only allows sync during a defined time interval. +This page will show you the implementation of a sensor which only allows sync during a defined time interval. You can use it as the basis of your own custom logic. -{{% alert title="In this page" color="tip" %}} - -{{% toc %}} - -{{% /alert %}} - ## Prerequisites {{% expand "A running machine connected to the Viam app. Click to see instructions." %}} @@ -44,7 +37,7 @@ You can use it as the basis of your own custom logic. {{< expand "Enable data capture and sync on your machine." >}} -Add the [data management service](/services/data/): +Add the [data management service](/data-ai/capture-data/capture-sync/#configure-the-data-management-service): On your machine's **CONFIGURE** tab, click the **+** icon next to your machine part in the left-hand menu and select **Service**. @@ -292,11 +285,11 @@ You have now configured sync to happen during a specific time slot. 
## Test your sync configuration -To test your setup, [configure a webcam](/components/camera/webcam/) or another component and [enable data capture on the component](/services/data/#configuration). +To test your setup, [configure a webcam](/operate/reference/components/camera/webcam/) or another component and [enable data capture on the component](/data-ai/capture-data/capture-sync/#configure-the-data-management-service). Make sure to physically connect any hardware parts to the computer controlling your machine. For a camera component, use the `ReadImage` method. The data manager will now capture data. -Go to the [**CONTROL** tab](/fleet/control/). +Go to the [**CONTROL** tab](/manage/troubleshoot/teleoperate/default-interface/#viam-app). You should see the sensor. Click on `GetReadings`. @@ -307,22 +300,3 @@ If you are in the time frame for sync, the time sync sensor will return true. You can confirm that if data is currently syncing by going to the [**Data** tab](https://app.viam.com/data/view). If you are not in the time frame for sync, adjust the configuration of your time sync sensor. Then check again on the **CONTROL** and **Data** tab to confirm data is syncing. - -## Next steps - -You can now use custom logic to trigger sync conditionally. -For more information, see: - - - -{{< cards >}} -{{% card link="/how-tos/sensor-module/" %}} -{{% card link="/how-tos/create-module/" %}} -{{% manualcard link="https://github.com/viam-labs/trigger-sync-examples-v2" %}} - -

Sync Trigger Examples

- -Other example code for modules that trigger sync. - -{{% /manualcard %}} -{{< /cards >}} diff --git a/docs/data-ai/capture-data/filter-before-sync.md b/docs/data-ai/capture-data/filter-before-sync.md new file mode 100644 index 0000000000..a55e4d6e49 --- /dev/null +++ b/docs/data-ai/capture-data/filter-before-sync.md @@ -0,0 +1,117 @@ +--- +linkTitle: "Filter data" +title: "Filter data before sync" +weight: 13 +layout: "docs" +type: "docs" +description: "Use filtering to collect and sync only certain images." +aliases: + - /how-tos/image-data/ +--- + +You can use filtering to selectively capture images using a machine learning (ML) model, for example to only capture images with people or specific objects in them. + +Contributors have written several filtering {{< glossary_tooltip term_id="module" text="modules" >}} that you can use to filter image capture. +The following steps use the [`filtered_camera`](https://github.com/erh/filtered_camera) module: + +{{< table >}} +{{% tablestep link="/data-ai/ai/deploy/"%}} +{{}} +**1. Add an ML model service to your machine** + +Add an ML model service on your machine that is compatible with the ML model you want to use, for example [TFLite CPU](https://github.com/viam-modules/mlmodel-tflite). + +{{% /tablestep %}} +{{% tablestep link="/operate/reference/services/vision/"%}} +{{}} +**2. Select a suitable ML model** + +Click **Select model** on the ML model service configuration panel, then select an [existing model](https://app.viam.com/registry?type=ML+Model) you want to use, or click **Upload a new model** to upload your own. +If you're not sure which model to use, you can use [`EfficientDet-COCO`](https://app.viam.com/ml-model/viam-labs/EfficientDet-COCO) from the **Registry**, which can detect people and animals, among other things. + +{{% /tablestep %}} +{{% tablestep link="/operate/reference/services/vision/"%}} +{{}} +**3. 
Add a vision service to use with the ML model** + +You can think of the vision service as the bridge between the ML model service and the output from your camera. + +Add and configure the `vision / ML model` service on your machine. +From the **Select model** dropdown, select the name of your ML model service (for example, `mlmodel-1`). + +{{% /tablestep %}} +{{% tablestep %}} +{{}} +**4. Configure the filtered camera** + +The `filtered-camera` {{< glossary_tooltip term_id="modular-resource" text="modular component" >}} pulls the stream of images from the camera you configured earlier, and applies the vision service to it. + +Configure a `filtered-camera` component on your machine, following the [attribute guide in the README](https://github.com/erh/filtered_camera?tab=readme-ov-file#configure-your-filtered-camera). +Use the name of the camera you configured in the first part of this guide as the `"camera"` to pull images from, and select the name of the vision service you just configured as your `"vision"` service. +Then add all or some of the labels your ML model uses as classifications or detections in `"classifications"` or `"objects"`. + +For example, if you are using the `EfficientDet-COCO` model, you could use a configuration like the following to only capture images when a person is detected with more than 60% confidence in your camera stream. + +```json {class="line-numbers linkable-line-numbers"} +{ + "window_seconds": 0, + "objects": { + "Person": 0.8 + }, + "camera": "camera-1", + "vision": "vision-1" +} +``` + +Additionally, you can also add a buffer window with `window_seconds` which controls the duration of a buffer of images captured prior to a successful match. +If you were to set `window_seconds` to `3`, the camera would also capture and sync images from the 3 seconds before a person appeared in the camera stream. + +{{% /tablestep %}} +{{% tablestep %}} +{{}} +**5. 
Configure data capture and sync on the filtered camera**
+
+Configure data capture and sync on the filtered camera just as you did before for the physical camera.
+The filtered camera will only capture image data that passes the filters you configured in the previous step.
+
+Turn off data capture on your original camera if you haven't already, so that you don't capture duplicate or unfiltered images.
+
+{{% /tablestep %}}
+{{% tablestep %}}
+**6. Save to start capturing**
+
+Save the config.
+With cloud sync enabled, captured data is automatically uploaded to the Viam app after a short delay.
+
+{{% /tablestep %}}
+{{% tablestep %}}
+
+{{}}
+**7. View filtered data in the Viam app**
+
+Once you save your configuration, place something that is part of your trained ML model within view of your camera.
+
+Images that pass your filter will be captured and will sync at the specified sync interval, which may mean you have to wait and then refresh the page for data to appear.
+Your images will begin to appear under the **DATA** tab.
+
+If no data appears after the sync interval, check the [**Logs**](/manage/troubleshoot/troubleshoot/#check-logs) and ensure that the condition for filtering is met.
+You can test the vision service from the [**CONTROL** tab](/manage/troubleshoot/teleoperate/default-interface/) to see its classifications and detections live.
+
+{{% /tablestep %}}
+{{% tablestep %}}
+{{}}
+**8. (Optional) Trigger sync with custom logic**
+
+By default, the captured data syncs at the regular interval you specified in the data capture config.
+If you need to trigger sync in a different way, see [Conditional cloud sync](/data-ai/capture-data/conditional-sync/) for a documented example of syncing data only at certain times of day. 
+ +{{% /tablestep %}} +{{< /table >}} + +## Stop data capture on the filtered camera + +If this is a test project, make sure you stop data capture to avoid [incurring fees](https://www.viam.com/product/pricing) for capturing large amounts of test data. + +In the **Data capture** section of your filtered camera's configuration, toggle the switch to **Off**. + +Click the **Save** button in the top right corner of the page to save your config. diff --git a/docs/data-ai/data/_index.md b/docs/data-ai/data/_index.md new file mode 100644 index 0000000000..422b01922b --- /dev/null +++ b/docs/data-ai/data/_index.md @@ -0,0 +1,10 @@ +--- +linkTitle: "Work with data" +title: "Work with data" +weight: 200 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +header_only: true +--- diff --git a/docs/data-ai/data/advanced/_index.md b/docs/data-ai/data/advanced/_index.md new file mode 100644 index 0000000000..f004c032cf --- /dev/null +++ b/docs/data-ai/data/advanced/_index.md @@ -0,0 +1,8 @@ +--- +linkTitle: "Advanced" +title: "Advanced" +weight: 200 +layout: "empty" +type: "docs" +empty_node: true +--- diff --git a/docs/data-ai/data/advanced/alert-data.md b/docs/data-ai/data/advanced/alert-data.md new file mode 100644 index 0000000000..13d0cc28e2 --- /dev/null +++ b/docs/data-ai/data/advanced/alert-data.md @@ -0,0 +1,305 @@ +--- +linkTitle: "Alert on data" +title: "Alert on data" +weight: 60 +layout: "docs" +type: "docs" +description: "Use triggers to send email notifications or webhook requests when data from the machine is synced." +prev: "/data-ai/data/export/" +--- + +You can use triggers to send email notifications or webhook requests when data from the machine is synced, even captured from a specific component with a specified condition. +For example, you can configure a trigger to send you a notification when your robot's sensor collects a new reading. 
+ +Follow this guide to learn how to configure a trigger to send webhook requests or emails for the following events: + +- **Data has been synced to the cloud**: trigger when data from the machine is synced +- **Conditional data ingestion**: trigger any time data is captured from a specified component with a specified method and condition + +## Configure a trigger + +To configure a trigger: + +{{< tabs >}} +{{% tab name="Builder mode" %}} + +1. Go to the **CONFIGURE** tab of your machine on the [Viam app](https://app.viam.com). + Click the **+** (Create) button in the left side menu and select **Trigger**. + + {{}} + +2. Name the trigger and click **Create**. + +3. Select trigger **Type**. + Configure additional attributes: + +{{< tabs name="Types of Triggers" >}} +{{% tab name="Data synced to cloud" %}} + +Select the data types for which the trigger should send requests. +Whenever data of the specified data types is ingested, a `POST` request will be sent. + +{{% /tab %}} +{{% tab name="Conditional data ingestion" %}} + +Select the component and method you want to capture data from. +Then, add any conditions. + +These can include a key, a value, and a logical operator. +For example, a trigger configured to fire when data is captured from the motor `motor-1`'s `IsPowered` method when `is_on` is equal to `True`: + +{{}} + +For more information, see [Conditions](#conditions). + +{{% alert title="Note" color="note" %}} +You must [configure data capture](/data-ai/capture-data/capture-sync/#configure-the-data-management-service) for your component to use this trigger. +{{% /alert %}} + +{{% /tab %}} +{{< /tabs >}} + +4. Add **Webhooks** or **Emails**. + +{{< tabs name="Notifications types" >}} +{{% tab name="Webhooks" %}} + +Click **Add Webhook**. +Add the URL of your cloud function or lambda. +Configure the time between notifications. 
+ +![The trigger configured with an example URL in the Viam app.](/build/configure/trigger-configured.png) + +{{% /tab %}} +{{% tab name="Emails" %}} + +Click **Add Email**. +Add the email you wish to be notified whenever this trigger is triggered. +Configure the time between notifications. + +![The trigger configured with an example email in the Viam app.](/build/configure/trigger-configured-email.png) + +{{% /tab %}} +{{< /tabs >}} +{{% /tab %}} +{{% tab name="JSON mode" %}} + +To configure your trigger by using **JSON** mode instead of **Builder** mode, paste one of the following JSON templates into your JSON config. +`"triggers"` is a top-level section, similar to `"components"` or `"services"`. + +{{< tabs >}} +{{% tab name="JSON Template: Data Synced" %}} + +```json {class="line-numbers linkable-line-numbers"} + "triggers": [ + { + "name": "", + "event": { + "type": "part_data_ingested", + "data_ingested": { + "data_types": ["binary", "tabular", "file"] + } + }, + "notifications": [ + { + "type": "webhook", + "value": "https://1abcde2ab3cd4efg5abcdefgh10zyxwv.lambda-url.us-east-1.on.aws", + "seconds_between_notifications": + } + ] + } + ] +``` + +{{% /tab %}} +{{% tab name="JSON Template: Conditional Data Ingestion" %}} + +```json {class="line-numbers linkable-line-numbers"} +"triggers": [ + { + "name": "", + "event": { + "type": "conditional_data_ingested", + "conditional": { + "data_capture_method": "::", + "condition": { + "evals": [ + { + "operator": "", + "value": + } + ] + } + } + }, + "notifications": [ + { + "type": "email", + "value": "", + "seconds_between_notifications": + } + ] + } +] + +``` + +{{% /tab %}} +{{% tab name="JSON Example" %}} + +```json {class="line-numbers linkable-line-numbers"} +{ + "components": [ + { + "name": "local", + "model": "pi", + "type": "board", + "namespace": "rdk", + "attributes": {}, + "depends_on": [] + }, + { + "name": "my_temp_sensor", + "model": "bme280", + "type": "sensor", + "namespace": "rdk", + "attributes": 
{}, + "depends_on": [], + "service_configs": [ + { + "type": "data_manager", + "attributes": { + "capture_methods": [ + { + "method": "Readings", + "additional_params": {}, + "capture_frequency_hz": 0.017 + } + ] + } + } + ] + } + ], + "triggers": [ + { + "name": "trigger-1", + "event": { + "type": "part_data_ingested", + "data_ingested": { + "data_types": ["binary", "tabular", "file"] + } + }, + "notifications": [ + { + "type": "webhook", + "value": "", + "seconds_between_notifications": 0 + } + ] + } + ] +} +``` + +{{% /tab %}} +{{< /tabs >}} + +{{% /tab %}} +{{< /tabs >}} + +The following attributes are available for triggers: + + +| Name | Type | Required? | Description | +| ---- | ---- | --------- | ----------- | +| `name` | string | **Required** | The name of the trigger | +| `event` | object | **Required** | The trigger event object:
  • `type`: The type of the event to trigger on. Options: `part_data_ingested`, `conditional_data_ingested`.
  • `data_types`: Required with `type` `part_data_ingested`. The data types that trigger the event. Options: `binary`, `tabular`, `file`, `unspecified`.
  • `conditional`: Required with `type` `conditional_data_ingested`. See [Conditions](#conditions) for more information.
| +| `notifications` | object | **Required** | The notifications object:
  • `type`: The type of the notification. Options: `webhook`, `email`
  • `value`: The URL to send the request to or the email address to notify.
  • `seconds_between_notifications`: The interval between notifications in seconds.
| + +### Conditions + +The `conditional` object for the `conditional_data_ingested` trigger includes the following options: + + +| Name | Type | Required? | Description | +| ---- | ---- | --------- | ----------- | +| `data_capture_method` | string | **Required** | The method of data capture to trigger on.
Example: `sensor::Readings`. | +| `condition` | object | Optional | Any additional conditions for the method to fire the trigger. Leave out this object for the trigger to fire any time there is data synced.
Options:
  • `evals`:
    • `operator`: Logical operator for the condition.
    • `value`: An object, string, or integer that specifies the value of the method of the condition, along with the key or nested keys of the measurements in data capture.
| + +Options for `operator`: + +| Name | Description | +| ----- | ------------------------ | +| `lt` | Less than | +| `gt` | Greater than | +| `lte` | Less than or equal to | +| `gte` | Greater than or equal to | +| `eq` | Equals | +| `neq` | Does not equal | + +Examples: + +{{< tabs >}} +{{% tab name="1 level of nesting" %}} + +```json {class="line-numbers linkable-line-numbers"} +"condition": { + "evals": [ + { + "operator": "lt", + "value": { + "Line-Neutral AC RMS Voltage": 130 + } + } + ] +} +``` + +This eval would trigger for the following sensor reading: + +```json {class="line-numbers linkable-line-numbers"} +{ + "readings": { + "Line-Neutral AC RMS Voltage": 100 + } +} +``` + +{{% /tab %}} +{{% tab name="2 levels of nesting" %}} + +```json {class="line-numbers linkable-line-numbers"} +"condition": { + "evals": [ + { + "operator": "lt", + "value": { + "coordinate": { + "latitude": 50 + } + } + } + ] +} +``` + +This eval would trigger for the following sensor reading: + +```json {class="line-numbers linkable-line-numbers"} +{ + "readings": { + "coordinate": { + "latitude": 40 + } + } +} +``` + +{{% /tab %}} +{{< /tabs >}} + +5. If using a webhook, write your cloud function or lambda to process the request from `viam-server`. + You can use your cloud function or lambda to interact with any external API such as, for example, Twilio, PagerDuty, or Zapier. + +{{< readfile "/static/include/webhooks.md" >}} diff --git a/docs/how-tos/export-data.md b/docs/data-ai/data/export.md similarity index 78% rename from docs/how-tos/export-data.md rename to docs/data-ai/data/export.md index cf15488853..c131311bd8 100644 --- a/docs/how-tos/export-data.md +++ b/docs/data-ai/data/export.md @@ -1,6 +1,7 @@ --- +linkTitle: "Export data" title: "Export data" -linkTitle: "Export Data" +weight: 40 description: "Download data from the Viam app using the data client API or the Viam CLI." 
type: "docs" tags: ["data management", "cloud", "sync"] @@ -10,24 +11,16 @@ aliases: - /manage/data/export/ - /data/export/ - /services/data/export/ + - /how-tos/export-data/ viamresources: ["sensor", "data_manager"] platformarea: ["data", "cli"] -level: "Beginner" -date: "2024-09-13" -# updated: "" # When the tutorial was last entirely checked -cost: "0" -languages: [] +date: "2024-12-03" +next: "/data-ai/data/advanced/alert-data/" --- You can download machine data from cloud storage to your computer with the Viam CLI. -If you prefer to manage your data with code, see the [data client API documentation](/appendix/apis/data-client/). - -{{% alert title="In this page" color="tip" %}} - -- [Export data with the Viam CLI](#export-data-with-the-viam-cli) - -{{% /alert %}} +If you prefer to manage your data with code, see the [data client API documentation](/dev/reference/apis/data-client/). ## Prerequisites @@ -76,7 +69,7 @@ viam data export --org-ids= --data-type= --mime-types=}}
-You can see more information about exporting data in the [Viam CLI documentation](/cli/#data). - -## Next steps - -Other how-to guides for using and querying data include: - -{{< cards >}} -{{% card link="/how-tos/train-deploy-ml/" %}} -{{% card link="/how-tos/sensor-data-visualize/" %}} -{{% card link="/how-tos/sensor-data-query-with-third-party-tools/" %}} -{{< /cards >}} +You can see more information about exporting data in the [Viam CLI documentation](/dev/tools/cli/#data). diff --git a/docs/how-tos/sensor-data-query-with-third-party-tools.md b/docs/data-ai/data/query.md similarity index 87% rename from docs/how-tos/sensor-data-query-with-third-party-tools.md rename to docs/data-ai/data/query.md index ed62365412..0bf31f7e19 100644 --- a/docs/how-tos/sensor-data-query-with-third-party-tools.md +++ b/docs/data-ai/data/query.md @@ -1,26 +1,23 @@ --- -title: "Query sensor data with third-party tools" -linkTitle: "Query sensor data with third-party tools" +linkTitle: "Query data" +title: "Query data" +weight: 20 +layout: "docs" type: "docs" -images: ["/services/icons/data-query.svg"] -icon: true -description: "Query sensor data that you have synced to the Viam app using the Viam app with SQL or MQL." aliases: - /manage/data/query/ - /data/query/ - /use-cases/sensor-data-query/ - /use-cases/sensor-data-query-with-third-party-tools/ + - /how-tos/sensor-data-query-with-third-party-tools/ languages: [] viamresources: ["sensor", "data_manager"] platformarea: ["data", "core"] -level: "Beginner" -date: "2024-08-16" -# updated: "" # When the tutorial was last entirely checked -cost: "0" -# SME: Devin Hilly +date: "2024-12-03" +description: "Query sensor data that you have synced to the Viam app using the Viam app with SQL or MQL." --- -You can use the data management service to [capture sensor data](/how-tos/collect-sensor-data/) from any machine and sync that data to the cloud. 
+You can use the data management service to [capture sensor data](/data-ai/capture-data/capture-sync/) from any machine and sync that data to the cloud. Then, you can follow the steps on this page to query it using {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}}. For example, you can configure data capture for several sensors on one machine, or for several sensors across multiple machines, to report the ambient operating temperature. You can then run queries against that data to search for outliers or edge cases, to analyze how the ambient temperature affects your machines' operation. @@ -30,42 +27,21 @@ You can then run queries against that data to search for outliers or edge cases, - **MQL**: Viam also supports the [MongoDB Query language](https://www.mongodb.com/docs/manual/tutorial/query-documents/) for querying captured data from MQL-compatible clients such as `mongosh` or MongoDB Compass. -{{< alert title="In this page" color="tip" >}} - -1. [Query data in the Viam app](#query-data-in-the-viam-app). -1. [Configure data query](#configure-data-query). -1. [Query data from third-party tools](#query-data-using-third-party-tools). - -{{< /alert >}} +## Query data in the Viam app -## Prerequisites +### Prerequisites {{% expand "Captured sensor data. Click to see instructions." %}} -Follow the guide to [capture sensor data](/how-tos/collect-sensor-data/). +Follow the guide to [capture sensor data](/data-ai/capture-data/capture-sync/). {{% /expand%}} -{{% expand "The Viam CLI to set up data query. Click to see instructions." %}} - -You must have the Viam CLI installed to configure querying with third-party tools. - -{{< readfile "/static/include/how-to/install-cli.md" >}} - -{{% /expand%}} - -{{% expand "mongosh or another third-party tool for querying data. Click to see instructions." 
%}} - -[Download the `mongosh` shell](https://www.mongodb.com/try/download/shell) or another third-party tool that can connect to a MongoDB data source to follow along. -See the [`mongosh` documentation](https://www.mongodb.com/docs/mongodb-shell/) for more information. - -{{% /expand%}} - -## Query data in the Viam app +### Query from the app Once your data has synced, you can query your data from within the Viam app using {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}}. -You must have the [owner role](/cloud/rbac/) in order to query data in the Viam app. +You must have the [owner role](/manage/manage/rbac/) in order to query data in the Viam app. {{< table >}} {{% tablestep %}} @@ -163,18 +139,43 @@ For more information on MQL syntax, see the [MQL (MongoDB Query Language)](https {{% /tablestep %}} {{< /table >}} -## Configure data query +## Query data using third-party tools + +### Prerequisites + +{{% expand "Captured sensor data. Click to see instructions." %}} + +Follow the guide to [capture sensor data](/data-ai/capture-data/capture-sync/). + +{{% /expand%}} + +{{% expand "The Viam CLI to set up data query. Click to see instructions." %}} + +You must have the Viam CLI installed to configure querying with third-party tools. + +{{< readfile "/static/include/how-to/install-cli.md" >}} + +{{% /expand%}} + +{{% expand "mongosh or another third-party tool for querying data. Click to see instructions." %}} + +[Download the `mongosh` shell](https://www.mongodb.com/try/download/shell) or another third-party tool that can connect to a MongoDB data source to follow along. +See the [`mongosh` documentation](https://www.mongodb.com/docs/mongodb-shell/) for more information. + +{{% /expand%}} + +### Configure data query If you want to query data from third party tools, you have to configure data query to obtain the credentials you need to connect to the third party service. 
{{< readfile "/static/include/how-to/query-data.md" >}} -## Query data using third-party tools +### Query data using third-party tools You can use third-party tools, such as the [`mongosh` shell](https://www.mongodb.com/docs/mongodb-shell/) or [MongoDB Compass](https://www.mongodb.com/docs/compass/current/), to query captured sensor data. {{< table >}} -{{% tablestep link="/how-tos/sensor-data-query-with-third-party-tools/#configure-data-query"%}} +{{% tablestep link="/data-ai/data/query/#configure-data-query"%}} **1. Connect to your Viam organization's data** Run the following command to connect to your Viam organization's MongoDB Atlas instance from `mongosh` using the connection URI you obtained during query configuration: @@ -277,19 +278,6 @@ db.readings.aggregate( {{% /tablestep %}} {{< /table >}} -## Next steps - For information on connecting to your Atlas instance from other MQL clients, see the MongoDB Atlas [Connect to your Cluster Tutorial](https://www.mongodb.com/docs/atlas/tutorial/connect-to-your-cluster/). -On top of querying sensor data with third-party tools, you can also [query it with the Python SDK](/appendix/apis/data-client/) or [visualize it](/how-tos/sensor-data-visualize/). - -{{< cards >}} -{{% card link="/appendix/apis/data-client/" %}} -{{% card link="/how-tos/sensor-data-visualize/" %}} -{{< /cards >}} - -To see sensor data in action, check out this tutorial: - -{{< cards >}} -{{% card link="/tutorials/control/air-quality-fleet/" %}} -{{< /cards >}} +On top of querying sensor data with third-party tools, you can also [query it with the Python SDK](/data-ai/reference/data-client/) or [visualize it](/data-ai/data/visualize/). 
diff --git a/docs/how-tos/sensor-data-visualize.md b/docs/data-ai/data/visualize.md similarity index 77% rename from docs/how-tos/sensor-data-visualize.md rename to docs/data-ai/data/visualize.md index d53b8d55c5..4032b333c5 100644 --- a/docs/how-tos/sensor-data-visualize.md +++ b/docs/data-ai/data/visualize.md @@ -1,43 +1,87 @@ --- -title: "Visualize sensor data from any machines" -linkTitle: "Visualize and analyze sensor data" +linkTitle: "Visualize data" +title: "Visualize data" weight: 20 +layout: "docs" type: "docs" images: ["/services/icons/data-visualization.svg"] icon: true -description: "Visualize sensor data from the Viam app using popular tools like Grafana." aliases: - /data/visualize/ - /use-cases/sensor-data-visualize/ -languages: [] + - /how-tos/sensor-data-visualize/ viamresources: ["sensor", "data_manager"] platformarea: ["data", "fleet"] -level: "Beginner" -date: "2024-08-16" -# updated: "" # When the tutorial was last entirely checked -cost: "0" +date: "2024-12-04" +description: "Use teleop or grafana to visualize sensor data from the Viam app." --- -Once you have used the data management service to [capture data](/how-tos/collect-sensor-data/), you can visualize your data with a variety of third-party tools, including Grafana, Tableau, Google's Looker Studio, and more. -You can choose to visualize data from a component on one machine, from multiple components together, or from many components across a fleet of machines. +Once you have used the data management service to [capture data](/data-ai/capture-data/capture-sync/), you can visualize your data with either the Viam app's **TELEOP** page or a variety of third-party tools, including Grafana, Tableau, Google's Looker Studio, and more. -For example, you can configure data capture for several sensors across multiple machines to report the ambient operating temperature. -You can then visualize that data to easily understand how the ambient temperature affects your machines' operation. 
+## Teleop -You can do all of this using the [Viam app](https://app.viam.com/) user interface. You will not need to write any code. +Visualize sensor data on a widget with the Viam app's **TELEOP** page. -{{< alert title="In this page" color="tip" >}} +### Prerequisites -1. [Configuring data query](#configure-data-query). -1. [Visualizing data with third-party tools](#visualize-data-with-third-party-tools). +{{% expand "A configured machine with sensor components" %}} -{{< /alert >}} +Make sure your machine has at least one of the following: -## Prerequisites +- A movement sensor or sensor + +See [configure a machine](/operate/get-started/supported-hardware/) for more information. + +{{% /expand%}} + +### Configure a workplace with a sensor widget + +{{< table >}} +{{% tablestep %}} +**1. Create a workspace in the Viam app** + +Log in to the [Viam app](https://app.viam.com/). + +Navigate to the **FLEET** page's **TELEOP** tab. +Create a workspace by clicking **+ Create workspace**. +Give it a name. + +{{}} + +{{% /tablestep %}} +{{% tablestep %}} +**2. Add widgets** + +Click **Add widget** and select the appropriate widget for your machine. +Repeat as many times as necessary. + +Click **Add widget** and add a **GPS** widget for a movement sensor and a **time series** or **stat** widget for a sensor. + +{{% /tablestep %}} +{{% tablestep %}} +**3. Select a machine** + +Now, select a machine with which to make your teleop workspace come to life. +Select **Monitor** in the top right corner to leave editing mode. +Click **Select machine** and select your configured machine. + +Your dashboard now shows the configured widget for the data from your machine. +For example, a time series graph measuring noise over time: + +{{< imgproc src="/services/data/time-series.png" alt="Time series widget measuring noise over time." 
style="width:500px" resize="1200x" class="imgzoom fill" >}} + +{{% /tablestep %}} +{{< /table >}} + +## Third party tools + +Configure data query and use a third-party visualization tool like Grafana to visualize your sensor data. + +### Prerequisites {{% expand "Captured sensor data. Click to see instructions." %}} -Follow the guide to [capture sensor data](/how-tos/collect-sensor-data/). +Follow the docs to [capture data](/data-ai/capture-data/capture-sync/) from a sensor. {{% /expand%}} @@ -49,24 +93,21 @@ You must have the Viam CLI installed to configure querying with third-party tool {{% /expand%}} -## Configure data query +### Configure data query If you want to query data from third party tools, you have to configure data query to obtain the credentials you need to connect to the third party service. {{< readfile "/static/include/how-to/query-data.md" >}} -## Visualize data with third-party tools +### Visualize data with third-party tools When you sync captured data to Viam, that data is stored in the Viam organization’s MongoDB Atlas Data Federation instance. You can use third-party visualization tools, such as Grafana, to visualize your data. Your chosen third-party visualization tool must be able to connect to a [MongoDB Atlas Data Federation](https://www.mongodb.com/docs/atlas/data-federation/query/sql/connect/) instance as its data store. -{{}} - Select a tab below to learn how to configure your visualization tool for use with Viam: -{{< tabs >}} -{{< tab name="Grafana" >}} +#### Grafana {{< table >}} {{% tablestep %}} @@ -136,15 +177,14 @@ sensorData.readings.aggregate([ ) ``` -See the [guide on querying sensor data](/how-tos/sensor-data-query-with-third-party-tools/) for more information. +See the [guide on querying data](/data-ai/data/query/) for more information. 
{{% /tablestep %}} {{< /table >}} -{{% /tab %}} -{{< tab name="Other visualization tools" >}} +#### Other visualization tools {{< table >}} {{% tablestep %}} @@ -216,26 +256,18 @@ Some third-party visualization tools support the ability to directly query your You might use this functionality to visualize only a single day's metrics, limit the visualization to a select machine or component, or to isolate an outlier in your reported data, for example. While every third-party tool is different, you would generally query your data using either {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}}. -See the [guide on querying sensor data](/how-tos/sensor-data-query-with-third-party-tools/) for more information. +See the [guide on querying data](/data-ai/data/query/) for more information. {{% /tablestep %}} {{< /table >}} -{{< /tab >}} -{{< /tabs >}} - -## Next steps +{{}} For more detailed instructions on using Grafana, including a full step-by-step configuration walkthrough, see [visualizing data with Grafana](/tutorials/services/visualize-data-grafana/). -On top of visualizing sensor data with third-party tools, you can also [query it with the Python SDK](/appendix/apis/data-client/) or [query it with the Viam app](/how-tos/sensor-data-query-with-third-party-tools/). - -{{< cards >}} -{{% card link="/appendix/apis/data-client/" %}} -{{% card link="/how-tos/sensor-data-query-with-third-party-tools/" %}} -{{< /cards >}} +On top of visualizing sensor data with third-party tools, you can also [query it with the Python SDK](/dev/reference/apis/data-client/) or [query it with the Viam app](/data-ai/data/query/). 
To see full projects using visualization, check out these resources: diff --git a/docs/data-ai/reference/_index.md b/docs/data-ai/reference/_index.md new file mode 100644 index 0000000000..326adf53af --- /dev/null +++ b/docs/data-ai/reference/_index.md @@ -0,0 +1,9 @@ +--- +linkTitle: "Reference" +title: "Reference" +weight: 500 +layout: "empty" +type: "docs" +empty_node: true +header_only: true +--- diff --git a/docs/data-ai/reference/data-client.md b/docs/data-ai/reference/data-client.md new file mode 100644 index 0000000000..ff74734765 --- /dev/null +++ b/docs/data-ai/reference/data-client.md @@ -0,0 +1,8 @@ +--- +title: "Upload and Retrieve Data with Viam's Data Client API" +linkTitle: "Data Client API" +weight: 30 +type: "docs" +layout: "empty" +canonical: "/dev/reference/apis/data-client/" +--- diff --git a/docs/data-ai/reference/ml-model-client.md b/docs/data-ai/reference/ml-model-client.md new file mode 100644 index 0000000000..208e700686 --- /dev/null +++ b/docs/data-ai/reference/ml-model-client.md @@ -0,0 +1,8 @@ +--- +title: "ML Model API" +linkTitle: "ML Model API" +weight: 30 +type: "docs" +layout: "empty" +canonical: "/dev/reference/apis/services/ml/" +--- diff --git a/docs/data-ai/reference/ml-training-client.md b/docs/data-ai/reference/ml-training-client.md new file mode 100644 index 0000000000..ca72b6850c --- /dev/null +++ b/docs/data-ai/reference/ml-training-client.md @@ -0,0 +1,8 @@ +--- +title: "Work with ML Training Jobs with Viam's ML Training API" +linkTitle: "ML Training Client API" +weight: 40 +type: "docs" +layout: "empty" +canonical: "/dev/reference/apis/services/ml/" +--- diff --git a/docs/data-ai/reference/vision-client.md b/docs/data-ai/reference/vision-client.md new file mode 100644 index 0000000000..f966b092ea --- /dev/null +++ b/docs/data-ai/reference/vision-client.md @@ -0,0 +1,8 @@ +--- +title: "Vision Service API" +linkTitle: "Vision Service API" +weight: 30 +type: "docs" +layout: "empty" +canonical: 
"/dev/reference/apis/services/vision/" +--- diff --git a/docs/dev/_index.md b/docs/dev/_index.md new file mode 100644 index 0000000000..502f12ff26 --- /dev/null +++ b/docs/dev/_index.md @@ -0,0 +1,801 @@ +--- +linkTitle: "Dev tools" +title: "Dev tools" +weight: 600 +layout: "docs" +type: "docs" +no_list: true +open_on_desktop: true +overview: true +noTitle: true +aliases: + - /docs/appendix/ +--- + +
+
+
+

Dev tools

+

+ Viam integrates with hardware and software on any device. Once you've set up your machines you can use the CLI and APIs to control and manage them. +

+ +
+ Robot illustration +
+
+
+ +
+ +Once you've set up your machine you can control your device and any attached physical hardware with [Viam APIs](/dev/reference/apis/), for example: + +{{< tabs class="horizontalheaders program" navheader="Examples">}} +{{% tab name="Drive a base" %}} +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +async def moveInSquare(base): + for _ in range(4): + # Move forward 500mm at 500mm/s + await base.move_straight(velocity=500, distance=500) + # Spin 90 degrees at 100 degrees/s + await base.spin(velocity=100, angle=90) +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +func moveInSquare(ctx context.Context, base base.Base, logger logging.Logger) { + for i := 0; i < 4; i++ { + // Move forward 500mm at 500mm/s + base.MoveStraight(ctx, 500, 500.0, nil) + // Spin 90 degrees at 100 degrees/s + base.Spin(ctx, 90, 100.0, nil) + } +} +``` + +{{% /tab %}} +{{% tab name="TypeScript" %}} + +```ts +async function moveInSquare(baseClient: VIAM.BaseClient) { + for (let i = 0; i < 4; i++) { + // Move forward 500mm at 500mm/s + await baseClient.moveStraight(500, 500); + // Spin 90 degrees at 100 degrees/s + await baseClient.spin(90, 100); + } +} +``` + +{{% /tab %}} +{{% tab name="Flutter" %}} + +```dart +Future moveSquare() async { + for (var i=0; i<4; i++) { + // Move forward 500mm at 500mm/s + await base.moveStraight(500, 500); + // Spins the rover 90 degrees at 100 degrees/s + await base.spin(90, 100); + } +} +``` + +{{% /tab %}} +{{% tab name="C++" %}} + +```cpp +void move_in_square(std::shared_ptr base) { + for (int i = 0; i < 4; ++i) { + // Move forward 500mm at 500mm/s + base->move_straight(500, 500); + // Spins the rover 90 degrees at 100 degrees/s + base->spin(90, 100); + } +} +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Once you have configured a robotic base, you can drive it using the base API. + +[Drive a base →](/tutorials/control/drive-rover/) + +
+
+ +{{}} + +
+
+
+ +{{% /tab %}} +{{% tab name="Control motor" %}} + +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +async def spin_motor(motor): + # Turn the motor at 35% power forwards + await motor.set_power(power=0.35) + # Let the motor spin for 3 seconds + time.sleep(3) + # Stop the motor + await motor.stop() +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +func spinMotor(ctx context.Context, motor motor.Motor, logger logging.Logger) { + // Turn the motor at 35% power forwards + err = motor.SetPower(context.Background(), 0.35, nil) + // Let the motor spin for 3 seconds + time.Sleep(3 * time.Second) + // Stop the motor + err = motor.Stop(context.Background(), nil) +} +``` + +{{% /tab %}} +{{% tab name="TypeScript" %}} + +```ts +async function spinMotor(motorClient: VIAM.MotorClient) { + // Turn the motor at 35% power forwards + await motorClient.setPower(0.35); + // Let the motor spin for 3 seconds + const sleep = (ms: number) => + new Promise((resolve) => setTimeout(resolve, ms)); + await sleep(3000); + // Stop the motor + await motorClient.stop(); +} +``` + +{{% /tab %}} +{{% tab name="Flutter" %}} + +```dart +Future spinMotor() async { + // Turn the motor at 35% power forwards + await motorClient.setPower(0.35); + // Let the motor spin for 3 seconds + await Future.delayed(Duration(seconds: 3)); + // Stop the motor + await motorClient.stop(); +} +``` + +{{% /tab %}} +{{% tab name="C++" %}} + +```cpp +void spin_motor(std::shared_ptr motor) { + // Turn the motor at 35% power forwards + motor->set_power(0.35); + // Let the motor spin for 3 seconds + sleep(3); + // Stop the motor + motor->stop(); +} +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Once you have configured a motor, you can operate it using the motor API. + +[Control a motor →](/dev/reference/apis/components/motor/) + +
+
+ +{{}} + +
+
+
+{{% /tab %}} +{{% tab name="Get sensor reading" %}} +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +# Get the readings provided by the sensor. +co_2_monitor = Sensor.from_robot(machine, "co2-monitor") +co_2_monitor_return_value = await co_2_monitor.get_readings() +print(f"co2-monitor get_readings return value: {co_2_monitor_return_value}") +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +// Get the readings provided by the sensor. +co2Monitor, err := sensor.FromRobot(machine, "co2-monitor") +co2MonitorReturnValue, err := co2Monitor.Readings( + context.Background(), map[string]interface{}{}) +logger.Infof("co2-monitor return value: %+v", co2MonitorReturnValue) +``` + +{{% /tab %}} +{{% tab name="TypeScript" %}} + +```ts +// Get the readings provided by the sensor. +const co2MonitorClient = new VIAM.SensorClient(machine, "co2-monitor"); +const co2MonitorReturnValue = await co2MonitorClient.getReadings(); +console.log("co2-monitor return value:", co2MonitorReturnValue); +``` + +{{% /tab %}} +{{% tab name="Flutter" %}} + +```dart +// Get the readings provided by the sensor. +final co2Monitor = Sensor.fromRobot(client, "co2-monitor"); +var readings = await co2Monitor.readings(); +print(readings); +``` + +{{% /tab %}} +{{% tab name="C++" %}} + +```cpp +// Get the readings provided by the sensor. +auto co2monitor = machine->resource_by_name("co2-monitor"); +auto co2monitor_get_readings_return_value = co2monitor->get_readings(); +std::cout << "co2-monitor get_readings return value " << co2monitor_get_readings_return_value << "\n"; +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Once you have configured a physical sensor or anything else that provides measurements, you can get sensor readings using the sensor API. + +[Collect sensor data →](/data-ai/capture-data/capture-sync/) + +
+
+
+{{% /tab %}} +{{% tab name="Move an arm" %}} +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +# Command a joint position move: move the forearm of the arm slightly up +cmd_joint_positions = JointPositions(values=[0, 0, -30.0, 0, 0, 0]) +await my_arm_component.move_to_joint_positions( + positions=cmd_joint_positions) + +# Generate a simple pose move +100mm in the +Z direction of the arm +cmd_arm_pose = await my_arm_component.get_end_position() +cmd_arm_pose.z += 100.0 +await my_arm_component.move_to_position(pose=cmd_arm_pose) +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +// Command a joint position move: move the forearm of the arm slightly up +cmdJointPositions := &armapi.JointPositions{Values: []float64{0.0, 0.0, -30.0, 0.0, 0.0, 0.0}} +err = myArmComponent.MoveToJointPositions(context.Background(), cmdJointPositions, nil) + +// Generate a simple pose move +100mm in the +Z direction of the arm +currentArmPose, err := myArmComponent.EndPosition(context.Background(), nil) +adjustedArmPoint := currentArmPose.Point() +adjustedArmPoint.Z += 100.0 +cmdArmPose := spatialmath.NewPose(adjustedArmPoint, currentArmPose.Orientation()) + +err = myArmComponent.MoveToPosition(context.Background(), cmdArmPose, nil) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Once you have configured a robotic arm, you can move it using the arm API. + +[Move a robotic arm →](/operate/mobility/move-arm/) + +
+
+ +{{}} + +
+
+
+{{% /tab %}} +{{% tab name="Operate custom hardware" %}} +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +my_button = Generic.from_robot(robot=machine, name="my_button") + +# Use a custom command to push the button 5 +command = {"cmd": "push_button", "button": 5} +result = await my_button.do_command(command) +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +myButton, err := generic.FromRobot(machine, "my_button") + +// Use a custom command to push the button 5 +command := map[string]interface{}{"cmd": "push_button", "button": 5} +result, err := myButton.DoCommand(context.Background(), command) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Using the Viam Registry you can create _{{< glossary_tooltip term_id="resource" text="resources" >}}_ for additional hardware types or models and then deploy them to your machines. +You can use an existing component or service type or create generic resources. + +[Create a module →](/operate/get-started/other-hardware/hello-world-module/) + +
+
+
+{{% /tab %}} +{{% tab name="Virtual hardware & Custom logic" %}} +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +my_twilio_svc = Generic.from_robot(robot=machine, name="my_twilio_svc") + +# Use a custom command to send a text message with Twilio +command = {"to": "+1 234 567 8901", "body": "Hello world!"} +result = await my_twilio_svc.do_command(command) +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +myTwilioSvc, err := generic.FromRobot(machine, "my_twilio_svc") + +// Use a custom command to send a text message with Twilio +command := map[string]interface{}{"to": "+1 234 567 8901", "body": "Hello world!"} +result, err := myTwilioSvc.DoCommand(context.Background(), command) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Using the Viam Registry you can turn services and your own custom business logic into _{{< glossary_tooltip term_id="module" text="modules" >}}_. You can then deploy your modules to your machines. + +[Create a module →](/operate/get-started/other-hardware/) + +
+
+
+{{% /tab %}} +{{< /tabs >}} + +
+
+
+ +You can also manage data, use higher level services, and manage your machines: + +{{< tabs class="horizontalheaders services" navheader="Examples">}} +{{% tab name="Use computer vision" %}} + +
+
+{{< tabs >}}
+{{% tab name="Python" %}}
+
+```python
+# Get image from camera stream on construction site
+cam = Camera.from_robot(machine, "construction-site-cam")
+img = await cam.get_image()
+
+# Use machine learning model to gather information from the image
+hardhat_detector = VisionClient.from_robot(machine, "hardhat_detector")
+detections = await hardhat_detector.get_detections(img)
+
+# Check whether a person is detected not wearing a hardhat
+for d in detections:
+    if d.confidence > 0.8 and d.class_name == "NO-Hardhat":
+        print("Violation detected.")
+```
+
+{{% /tab %}}
+{{% tab name="Go" %}}
+
+```go
+// Get image from camera stream on construction site
+myCamera, err := camera.FromRobot(machine, "construction-site-cam")
+camStream, err := myCamera.Stream(context.Background())
+img, release, err := camStream.Next(context.Background())
+defer release()
+
+// Use machine learning model to gather information from the image
+visService, err := vision.FromRobot(machine, "hardhat_detector")
+detections, err := visService.Detections(context.Background(), img, nil)
+
+// Check whether a person is detected not wearing a hardhat
+for i := 0; i < len(detections); i++ {
+    if (detections[i].confidence > 0.8) && (detections[i].class_name == "NO-Hardhat") {
+        logger.Info("Violation detected.")
+    }
+}
+```
+
+{{% /tab %}}
+{{< /tabs >}}
+
+
+ +Computer vision enables your machine to use connected cameras to interpret the world around it. +With inferences about a machine's surroundings, you can program machines to act based on this input using the vision service API. + +[Try the vision service →](/tutorials/projects/helmet/) + +
+
+ +{{}} + +
+
+
+ +{{% /tab %}} +{{% tab name="Query captured data" %}} + +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +# Tag data from the my_camera component +my_filter = create_filter(component_name="my_camera") +tags = ["frontview", "trainingdata"] +res = await data_client.add_tags_to_binary_data_by_filter(tags, my_filter) + +# Query sensor data by filter +my_data = [] +my_filter = create_filter( + component_name="sensor-1", + start_time=Timestamp('2024-10-01 10:00:00', tz='US/Pacific'), + end_time=Timestamp('2024-10-12 18:00:00', tz='US/Pacific') +) +tabular_data, count, last = await data_client.tabular_data_by_filter( + my_filter, last=None) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +You can query synced sensor data, images, and any other binary or timeseries data from all your machines using the data client API. + +[Learn about Data Management →](/data-ai/capture-data/capture-sync/) + +
+
+
+{{% /tab %}} +{{% tab name="Move arms" %}} +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +# Add a table obstacle to a WorldState +table_origin = Pose(x=-202.5, y=-546.5, z=-19.0) +table_dimensions = Vector3(x=635.0, y=1271.0, z=38.0) +table_object = Geometry(center=table_origin, + box=RectangularPrism(dims_mm=table_dimensions)) +obstacles_in_frame = GeometriesInFrame(reference_frame="world", + geometries=[table_object]) +world_state = WorldState(obstacles=[obstacles_in_frame]) + +# Destination pose to move to +dest_in_frame = PoseInFrame( + reference_frame="world", + pose=Pose(x=510.0, y=0.0, z=526.0, o_x=0.7, o_y=0.0, o_z=-0.7, theta=0.0)) + +# Move arm to destination pose +motion_service = MotionClient.from_robot(robot, "builtin") +await motion_service.move( + component_name=Arm.get_resource_name("myArm"), + destination=dest_in_frame, world_state=world_state) +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +// Add a table obstacle to a WorldState +obstacles := make([]spatialmath.Geometry, 0) +tableOrigin := spatialmath.NewPose( + r3.Vector{X: 0.0, Y: 0.0, Z: -10.0}, + &spatialmath.OrientationVectorDegrees{OX: 0.0, OY: 0.0, OZ: 1.0, Theta: 0.0}, +) +tableDimensions := r3.Vector{X: 2000.0, Y: 2000.0, Z: 20.0} +tableObj, err := spatialmath.NewBox(tableOrigin, tableDimensions, "table") +obstacles = append(obstacles, tableObj) +obstaclesInFrame := referenceframe.NewGeometriesInFrame(referenceframe.World, obstacles) +worldState, err := referenceframe.NewWorldState([]*referenceframe.GeometriesInFrame{obstaclesInFrame}, nil) + +// Destination pose to move to +destinationPose := spatialmath.NewPose( + r3.Vector{X: 510.0, Y: 0.0, Z: 526.0}, + &spatialmath.OrientationVectorDegrees{OX: 0.7071, OY: 0.0, OZ: -0.7071, Theta: 0.0}, +) +destPoseInFrame := referenceframe.NewPoseInFrame( + referenceframe.World, destinationPose) + +// Move arm to destination pose +motionService, err := motion.FromRobot(robot, "builtin") +_, err = motionService.Move(context.Background(), arm.Named("myArm"), destPoseInFrame, 
worldState, nil, nil) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +The motion service enables your machine to plan and move relative to itself, other machines, and the world. You can use it with the motion service API. + +[Try the motion service →](/tutorials/services/plan-motion-with-arm-gripper/) + +
+
+ +{{}} + +
+
+
+{{% /tab %}} +{{% tab name="Navigate bases" %}} +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +my_nav = NavigationClient.from_robot(robot=robot, name="my_nav_service") + +# Create a new waypoint at the specified latitude and longitude +location = GeoPoint(latitude=40.76275, longitude=-73.96) + +# Add waypoint to the service's data storage +await my_nav.add_waypoint(point=location) + +my_nav = NavigationClient.from_robot(robot=robot, name="my_nav_service") + +# Set the service to operate in waypoint mode and begin navigation +await my_nav.set_mode(Mode.ValueType.MODE_WAYPOINT) +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +myNav, err := navigation.FromRobot(robot, "my_nav_service") + +// Create a new waypoint at the specified latitude and longitude +location = geo.NewPoint(40.76275, -73.96) + +// Add waypoint to the service's data storage +err := myNav.AddWaypoint(context.Background(), location, nil) + +myNav, err := navigation.FromRobot(robot, "my_nav_service") + +// Set the service to operate in waypoint mode and begin navigation +mode, err := myNav.SetMode(context.Background(), Mode.MODE_WAYPOINT, nil) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Autonomously navigate a machine to defined waypoints using the navigation service API. + +[Try the navigation service →](/tutorials/services/navigate-with-rover-base/) + +
+
+
+{{% /tab %}} +{{% tab name="Check machine status" %}} + +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +# Get all machines in a location +machines = await cloud.list_robots(location_id="abcde1fghi") + +for m in machines: + # Connect and get status information or latest logs + machine_parts = await cloud.get_robot_parts(m.id) + main_part = next(filter(lambda part: part.main_part, machine_parts), None) + + try: + # Get status for machine + machine = await connect(main_part.fqdn) + status = await machine.get_machine_status() + except ConnectionError: + # If no connection can be made, get last logs + logs = await cloud.get_robot_part_logs( + robot_part_id=main_part.id, num_log_entries=5) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Get status information and logs from all your deployed machines using the fleet management API. + +[Learn about Platform APIs →](/dev/reference/apis/#platform-apis) + +
+
+
+ +{{% /tab %}} +{{% tab name="Train ML models" %}} + +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +# Start a training job to create a classification model based on the dataset +job_id = await ml_training_client.submit_training_job( + org_id="abbc1c1c-d2e3-5f67-ab8c-de912345f678", + dataset_id="12ab3cd4e56f7abc89de1fa2", + model_name="recognize_gestures", + model_version="1", + model_type=ModelType.MODEL_TYPE_MULTI_LABEL_CLASSIFICATION, + tags=["follow", "stop"] +) + +# Get status information for training job +job_metadata = await ml_training_client.get_training_job( + id=job_id) +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+
+Build machine learning models based on your machines' data any time using the ML training client API.
+
+[Train and deploy ML models →](/data-ai/ai/train-tflite/)
+
+
+
+ +{{% /tab %}} +{{% tab name="Manage access and resources" %}} + +
+ +{{< tabs >}} +{{% tab name="Python" %}} + +```python +# Create a new machine +new_machine_id = await cloud.new_robot( + name="new-machine", location_id="abcde1fghi") + +# Get organization associated with authenticated user / API key +org_list = await cloud.list_organizations() + +# Create a new API key with owner access for the new machine +auth = APIKeyAuthorization( + role="owner", + resource_type="robot", + resource_id=new_machine_id +) +api_key, api_key_id = await cloud.create_key( + org_list[0].id, [auth], "key_for_new_machine") +``` + +{{% /tab %}} +{{< /tabs >}} + +
+
+ +Viam allows you to organize and manage any number of machines. When collaborating with others, you can assign permissions using Role-Based Access Control (RBAC). Programmatically you can do this with the fleet management API. + +[Learn about access control →](/manage/manage/rbac/) + +
+
+
+ +{{% /tab %}} +{{< /tabs >}} diff --git a/docs/appendix/contributing.md b/docs/dev/contributing.md similarity index 98% rename from docs/appendix/contributing.md rename to docs/dev/contributing.md index 669d5d4129..edd8c7b542 100644 --- a/docs/appendix/contributing.md +++ b/docs/dev/contributing.md @@ -62,7 +62,7 @@ The docs use the [Diátaxis Framework](https://diataxis.fr/) as the basis of t - **Explanation (conceptual)**: An understanding-oriented piece of content. This content provides background knowledge on a topic and tends to be referenced in how-to guides and tutorials. - For example the [`viam-server` page](/architecture/viam-server/) or the [Registry page](/registry/). + For example the [`viam-server` page](/operate/reference/viam-server/). It’s useful to have a real or imagined "Why?" question to serve as a prompt. {{< expand "Click to view template" >}} @@ -94,7 +94,7 @@ The docs use the [Diátaxis Framework](https://diataxis.fr/) as the basis of t - **How-to Guide (procedural)**: A task-oriented piece of content that directs a reader to perform actions step by step to complete a task, like instructions to sauté onions. Generally starts with a description of the task and things to consider, and then provides a set of numbered steps to follow. - For example, the [Installation page](/installation/viam-server-setup/) or the [Find module page](/registry/modular-resources/). + For example, the [Move a base](/operate/mobility/move-base/) page. {{< expand "Click to view template" >}} @@ -159,7 +159,7 @@ The docs use the [Diátaxis Framework](https://diataxis.fr/) as the basis of t {{< /expand >}} - **Reference**: A concise, information-oriented piece of content that generally starts with an overview/introduction and then a list of some kind (configuration options, API methods, etc.). - Examples include the [API pages](/appendix/apis/) as well as [component and service pages](/components/arm/). 
+ Examples include the [API pages](/dev/reference/apis/) as well as [component and service pages](/operate/reference/components/arm/). Example template: [Component template](https://github.com/viamrobotics/docs/blob/main/docs/components/component/_index.md). diff --git a/docs/dev/reference/_index.md b/docs/dev/reference/_index.md new file mode 100644 index 0000000000..8b203fb9c0 --- /dev/null +++ b/docs/dev/reference/_index.md @@ -0,0 +1,10 @@ +--- +linkTitle: "Reference" +title: "Reference" +weight: 300 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +header_only: true +--- diff --git a/docs/dev/reference/apis/_index.md b/docs/dev/reference/apis/_index.md new file mode 100644 index 0000000000..069c5ccd89 --- /dev/null +++ b/docs/dev/reference/apis/_index.md @@ -0,0 +1,88 @@ +--- +title: "Viam's Client APIs" +linkTitle: "APIs" +weight: 10 +type: "docs" +description: "Access and control your machine or fleet with the SDKs' client libraries for the resource and robot APIs." +images: ["/general/code.png"] +tags: ["client", "sdk", "viam-server", "networking", "apis", "robot api"] +aliases: + - /program/sdks/ + - /program/apis/ + - /build/program/apis/ + - /appendix/apis/ +no_list: true +date: "2024-10-01" +# updated: "" # When the content was last entirely checked +--- + +Every Viam {{< glossary_tooltip term_id="resource" text="resource" >}} exposes an [application programming interface (API)](https://en.wikipedia.org/wiki/API) described through [protocol buffers](https://developers.google.com/protocol-buffers). + +The API methods provided by the SDKs for each of these resource APIs wrap gRPC client requests to the machine when you execute your program, providing you a convenient interface for accessing information about and controlling the {{< glossary_tooltip term_id="resource" text="resources" >}} you have [configured](/operate/get-started/supported-hardware/) on your machine. 
+ +## Platform APIs + +{{< cards >}} +{{% manualcard link="/dev/reference/apis/fleet/" title="Fleet Management API" %}} + +Create and manage organizations, locations, and machines, get logs from individual machines, and manage fragments and permissions. + +{{% /manualcard %}} +{{% manualcard link="/dev/reference/apis/data-client/" title="Data Client API" %}} + +Upload, download, filter, tag or perform other tasks on data like images or sensor readings. + +{{% /manualcard %}} +{{% manualcard link="/dev/reference/apis/robot/" title="Machine Management API" %}} + +Manage your machines: connect to your machine, retrieve status information, and send commands remotely. + +{{% /manualcard %}} +{{% manualcard link="/dev/reference/apis/ml-training-client/" title="ML Training Client API" %}} + +Submit and manage ML training jobs running on the Viam app. + +{{% /manualcard %}} +{{% manualcard link="/dev/reference/apis/billing-client/" title="Billing Client API" %}} + +Retrieve billing information from the Viam app. 
+ +{{% /manualcard %}} + +{{< /cards >}} + +## Component APIs + +These APIs provide interfaces for controlling and getting information from the {{< glossary_tooltip term_id="component" text="components" >}} of a machine: + +{{< cards >}} +{{< card link="/dev/reference/apis/components/arm/" customTitle="Arm API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/base/" customTitle="Base API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/board/" customTitle="Board API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/camera/" customTitle="Camera API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/encoder/" customTitle="Encoder API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/gantry/" customTitle="Gantry API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/generic/" customTitle="Generic API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/gripper/" customTitle="Gripper API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/input-controller/" customTitle="Input controller API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/motor/" customTitle="Motor API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/movement-sensor/" customTitle="Movement sensor API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/power-sensor/" customTitle="Power sensor API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/sensor/" customTitle="Sensor API" noimage="True" >}} +{{< card link="/dev/reference/apis/components/servo/" customTitle="Servo API" noimage="True" >}} +{{< /cards >}} + +## Service APIs + +These APIs provide interfaces for controlling and getting information from the services you configured on a machine. 
+ +{{< cards >}} +{{% card link="/dev/reference/apis/services/data/" customTitle="Data management service API" noimage="True" %}} +{{% card link="/dev/reference/apis/services/vision/" customTitle="Vision service API" noimage="True" %}} +{{% card link="/dev/reference/apis/services/ml/" customTitle="ML model service API" noimage="True" %}} +{{% card link="/dev/reference/apis/services/motion/" customTitle="Motion service API" noimage="True" %}} +{{% card link="/dev/reference/apis/services/navigation/" customTitle="Navigation service API" noimage="True" %}} +{{% card link="/dev/reference/apis/services/generic/" customTitle="Generic service API" noimage="True" %}} +{{% card link="/dev/reference/apis/services/slam/" customTitle="SLAM service API" noimage="True" %}} +{{% card link="/dev/reference/apis/services/base-rc/" customTitle="Base Remote Control service API" noimage="True" %}} +{{< /cards >}} diff --git a/docs/appendix/apis/billing-client.md b/docs/dev/reference/apis/billing-client.md similarity index 91% rename from docs/appendix/apis/billing-client.md rename to docs/dev/reference/apis/billing-client.md index ed4d0df8bb..422c2dc0cb 100644 --- a/docs/appendix/apis/billing-client.md +++ b/docs/dev/reference/apis/billing-client.md @@ -8,6 +8,7 @@ tags: ["cloud", "sdk", "viam-server", "networking", "apis", "robot api"] aliases: - /program/apis/billing-client/ - /build/program/apis/billing-client/ + - /appendix/apis/billing-client/ date: "2024-09-14" # updated: "" # When the content was last entirely checked --- @@ -29,11 +30,11 @@ The billing client API supports the following methods: To use the Viam billing client API, you first need to instantiate a [`ViamClient`](https://python.viam.dev/autoapi/viam/app/viam_client/index.html#viam.app.viam_client.ViamClient) and then instantiate a [`BillingClient`](https://python.viam.dev/autoapi/viam/app/billing_client/index.html#viam.app.billing_client.BillingClient). 
You will also need an API key and API key ID to authenticate your session. -Your API key needs to have [Org owner permissions](/cloud/rbac/#organization-settings-and-roles) to use the billing client API. +Your API key needs to have [Org owner permissions](/manage/manage/rbac/#organization-settings-and-roles) to use the billing client API. To get an API key (and corresponding ID), you have two options: - [Create an API key using the Viam app](/cloud/rbac/#add-an-api-key) -- [Create an API key using the Viam CLI](/cli/#create-an-organization-api-key) +- [Create an API key using the Viam CLI](/dev/tools/cli/#create-an-organization-api-key) The following example instantiates a `ViamClient`, authenticating with an API key, and then instantiates a `BillingClient`: diff --git a/docs/appendix/apis/components/_index.md b/docs/dev/reference/apis/components/_index.md similarity index 77% rename from docs/appendix/apis/components/_index.md rename to docs/dev/reference/apis/components/_index.md index 9c8eba8326..33a4cfc90e 100644 --- a/docs/appendix/apis/components/_index.md +++ b/docs/dev/reference/apis/components/_index.md @@ -4,6 +4,6 @@ title: "Component APIs" weight: 5 empty_node: true layout: "empty" -canonical: "/appendix/apis/" +canonical: "/dev/reference/apis/" type: "docs" --- diff --git a/docs/appendix/apis/components/arm.md b/docs/dev/reference/apis/components/arm.md similarity index 85% rename from docs/appendix/apis/components/arm.md rename to docs/dev/reference/apis/components/arm.md index 8448d550ef..460bc8f62f 100644 --- a/docs/appendix/apis/components/arm.md +++ b/docs/dev/reference/apis/components/arm.md @@ -3,14 +3,17 @@ title: "Arm API" linkTitle: "Arm" weight: 10 type: "docs" -description: "Give commands to your arm components for linear motion planning, including self-collision prevention and obstacle avoidance." +description: "Give commands to your arm components for linear motion planning." 
icon: true images: ["/icons/components/arm.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/arm/ # updated: "" # When the content was last entirely checked --- -The arm API allows you to give commands to your [arm components](/components/arm/) for linear motion planning, including self-collision prevention and obstacle avoidance. +The arm API allows you to give commands to your [arm components](/operate/reference/components/arm/) for linear motion planning with self-collision prevention. +If you want the arm to avoid obstacles, or you want to plan complex motion in an automated way, use the [motion API](/dev/reference/apis/services/motion/). The arm component supports the following methods: diff --git a/docs/appendix/apis/components/base.md b/docs/dev/reference/apis/components/base.md similarity index 87% rename from docs/appendix/apis/components/base.md rename to docs/dev/reference/apis/components/base.md index 31203c21f8..a1de692338 100644 --- a/docs/appendix/apis/components/base.md +++ b/docs/dev/reference/apis/components/base.md @@ -7,10 +7,12 @@ description: "Give commands for moving all configured components attached to a m icon: true images: ["/icons/components/base.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/base/ # updated: "" # When the content was last entirely checked --- -The base API allows you to give commands to your [base components](/components/base/) for moving all configured components attached to a platform as a whole without needing to send commands to individual components. +The base API allows you to give commands to your [base components](/operate/reference/components/base/) for moving all configured components attached to a platform as a whole without needing to send commands to individual components. 
The base component supports the following methods: diff --git a/docs/appendix/apis/components/board.md b/docs/dev/reference/apis/components/board.md similarity index 89% rename from docs/appendix/apis/components/board.md rename to docs/dev/reference/apis/components/board.md index 20321ad601..12b2f9d881 100644 --- a/docs/appendix/apis/components/board.md +++ b/docs/dev/reference/apis/components/board.md @@ -7,10 +7,12 @@ description: "Give commands for setting GPIO pins to high or low, setting PWM, a icon: true images: ["/icons/components/board.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/board/ # updated: "" # When the content was last entirely checked --- -The board API allows you to give commands to your [board components](/components/board/) for setting GPIO pins to high or low, setting PWM, and working with analog and digital interrupts. +The board API allows you to give commands to your [board components](/operate/reference/components/board/) for setting GPIO pins to high or low, setting PWM, and working with analog and digital interrupts. The board component supports the following methods: diff --git a/docs/appendix/apis/components/camera.md b/docs/dev/reference/apis/components/camera.md similarity index 92% rename from docs/appendix/apis/components/camera.md rename to docs/dev/reference/apis/components/camera.md index 389a0f6ae8..1156efd89d 100644 --- a/docs/appendix/apis/components/camera.md +++ b/docs/dev/reference/apis/components/camera.md @@ -7,10 +7,12 @@ description: "Give commands for getting images or point clouds." icon: true images: ["/icons/components/camera.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/camera/ # updated: "" # When the content was last entirely checked --- -The camera API allows you to give commands to your [camera components](/components/camera/) for getting images or point clouds. 
+The camera API allows you to give commands to your [camera components](/operate/reference/components/camera/) for getting images or point clouds. The API for camera components allows you to: diff --git a/docs/appendix/apis/components/encoder.md b/docs/dev/reference/apis/components/encoder.md similarity index 90% rename from docs/appendix/apis/components/encoder.md rename to docs/dev/reference/apis/components/encoder.md index f360c18b95..ff1d886b67 100644 --- a/docs/appendix/apis/components/encoder.md +++ b/docs/dev/reference/apis/components/encoder.md @@ -7,10 +7,12 @@ description: "Give commands for getting the position of a motor or a joint in ti icon: true images: ["/icons/components/encoder.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/encoder/ # updated: "" # When the content was last entirely checked --- -The encoder API allows you to give commands to your [encoder components](/components/encoder/) for getting the position of a motor or a joint in ticks or degrees. +The encoder API allows you to give commands to your [encoder components](/operate/reference/components/encoder/) for getting the position of a motor or a joint in ticks or degrees. 
The encoder component supports the following methods: diff --git a/docs/appendix/apis/components/gantry.md b/docs/dev/reference/apis/components/gantry.md similarity index 90% rename from docs/appendix/apis/components/gantry.md rename to docs/dev/reference/apis/components/gantry.md index 5d474ee2bc..fa98c30eb7 100644 --- a/docs/appendix/apis/components/gantry.md +++ b/docs/dev/reference/apis/components/gantry.md @@ -7,10 +7,12 @@ description: "Give commands for coordinated control of one or more linear actuat icon: true images: ["/icons/components/gantry.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/gantry/ # updated: "" # When the content was last entirely checked --- -The gantry API allows you to give commands to your [gantry components](/components/gantry/) for coordinated control of one or more linear actuators. +The gantry API allows you to give commands to your [gantry components](/operate/reference/components/gantry/) for coordinated control of one or more linear actuators. The gantry component supports the following methods: diff --git a/docs/appendix/apis/components/generic.md b/docs/dev/reference/apis/components/generic.md similarity index 90% rename from docs/appendix/apis/components/generic.md rename to docs/dev/reference/apis/components/generic.md index 1503914b0f..705ebc3e36 100644 --- a/docs/appendix/apis/components/generic.md +++ b/docs/dev/reference/apis/components/generic.md @@ -7,10 +7,12 @@ description: "Give commands for running custom model-specific commands using DoC icon: true images: ["/icons/components/generic.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/generic/ # updated: "" # When the content was last entirely checked --- -The generic API allows you to give commands to your [generic components](/components/generic/) for running model-specific commands using [`DoCommand`](/appendix/apis/components/generic/#docommand). 
+The generic API allows you to give commands to your [generic components](/operate/reference/components/generic/) for running model-specific commands using [`DoCommand`](/dev/reference/apis/components/generic/#docommand). The generic component supports the following method: diff --git a/docs/appendix/apis/components/gripper.md b/docs/dev/reference/apis/components/gripper.md similarity index 91% rename from docs/appendix/apis/components/gripper.md rename to docs/dev/reference/apis/components/gripper.md index 35000b696a..3251f9c967 100644 --- a/docs/appendix/apis/components/gripper.md +++ b/docs/dev/reference/apis/components/gripper.md @@ -7,10 +7,12 @@ description: "Give commands for opening and closing a gripper device." icon: true images: ["/icons/components/gripper.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/gripper/ # updated: "" # When the content was last entirely checked --- -The gripper API allows you to give commands to your [gripper components](/components/gripper/) for opening and closing a device. +The gripper API allows you to give commands to your [gripper components](/operate/reference/components/gripper/) for opening and closing a device. 
The gripper component supports the following methods: diff --git a/docs/appendix/apis/components/input-controller.md b/docs/dev/reference/apis/components/input-controller.md similarity index 96% rename from docs/appendix/apis/components/input-controller.md rename to docs/dev/reference/apis/components/input-controller.md index 3bd12e8729..9265d28fa8 100644 --- a/docs/appendix/apis/components/input-controller.md +++ b/docs/dev/reference/apis/components/input-controller.md @@ -8,10 +8,12 @@ description: "Give commands to register callbacks for events, allowing you to us icon: true images: ["/icons/components/controller.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/components/input-controller/ # updated: "" # When the content was last entirely checked --- -The input controller API allows you to give commands to your [input controller components](/components/input-controller/) for configuring callbacks for events, allowing you to configure input devices to control your machines. +The input controller API allows you to give commands to your [input controller components](/operate/reference/components/input-controller/) for configuring callbacks for events, allowing you to configure input devices to control your machines. The input controller component supports the following methods: @@ -64,14 +66,14 @@ Each `Event` object represents a singular event from the input device, and has f 1. `Time`: `time.Time` the event occurred. 2. `Event`: `EventType` indicating the type of event (for example, a specific button press or axis movement). -3. `Control`: `Control` indicating which [Axis](#axis-controls), [Button](/appendix/apis/components/input-controller/#button-controls), or Pedal on the controller has been changed. -4. `Value`: `float64` indicating the position of an [Axis](/appendix/apis/components/input-controller/#axis-controls) or the state of a [Button](/appendix/apis/components/input-controller/#button-controls) on the specified control. +3. 
`Control`: `Control` indicating which [Axis](#axis-controls), [Button](/dev/reference/apis/components/input-controller/#button-controls), or Pedal on the controller has been changed. +4. `Value`: `float64` indicating the position of an [Axis](/dev/reference/apis/components/input-controller/#axis-controls) or the state of a [Button](/dev/reference/apis/components/input-controller/#button-controls) on the specified control. #### EventType field A string-like type indicating the specific type of input event, such as a button press or axis movement. -- To select for events of all type when registering callback function with [RegisterControlCallback](/appendix/apis/components/input-controller/#registercontrolcallback), you can use `AllEvents` as your `EventType`. +- To select for events of all type when registering callback function with [RegisterControlCallback](/dev/reference/apis/components/input-controller/#registercontrolcallback), you can use `AllEvents` as your `EventType`. - The registered function is then called in addition to any other callback functions you've registered, every time an `Event` happens on your controller. This is useful for debugging without interrupting normal controls, or for capturing extra or unknown events. diff --git a/docs/appendix/apis/components/motor.md b/docs/dev/reference/apis/components/motor.md similarity index 91% rename from docs/appendix/apis/components/motor.md rename to docs/dev/reference/apis/components/motor.md index 75e8e0fe82..a5d6872812 100644 --- a/docs/appendix/apis/components/motor.md +++ b/docs/dev/reference/apis/components/motor.md @@ -7,10 +7,12 @@ description: "Give commands to operate a motor or get its current status." 
icon: true images: ["/icons/components/motor.svg"] date: "2024-10-10" +aliases: + - /appendix/apis/components/motor/ # updated: "" # When the content was last entirely checked --- -The motor API allows you to give commands to your [motor components](/components/motor/) for operating a motor or getting its current status. +The motor API allows you to give commands to your [motor components](/operate/reference/components/motor/) for operating a motor or getting its current status. The motor component supports the following methods: diff --git a/docs/appendix/apis/components/movement-sensor.md b/docs/dev/reference/apis/components/movement-sensor.md similarity index 89% rename from docs/appendix/apis/components/movement-sensor.md rename to docs/dev/reference/apis/components/movement-sensor.md index 4be1c5ef6d..3590fa7525 100644 --- a/docs/appendix/apis/components/movement-sensor.md +++ b/docs/dev/reference/apis/components/movement-sensor.md @@ -7,10 +7,12 @@ description: "Give commands for getting the current GPS location, linear velocit icon: true images: ["/icons/components/imu.svg"] date: "2022-10-10" +aliases: + - /appendix/apis/components/movement-sensor/ # updated: "" # When the content was last entirely checked --- -The movement sensor API allows you to give commands to your [movement sensor components](/components/movement-sensor/) for getting a GPS location, linear velocity and acceleration, angular velocity and acceleration and heading. +The movement sensor API allows you to give commands to your [movement sensor components](/operate/reference/components/movement-sensor/) for getting a GPS location, linear velocity and acceleration, angular velocity and acceleration and heading. Different movement sensors provide different data, so be aware that not all of the methods below are supported by all movement sensors. 
diff --git a/docs/appendix/apis/components/power-sensor.md b/docs/dev/reference/apis/components/power-sensor.md similarity index 90% rename from docs/appendix/apis/components/power-sensor.md rename to docs/dev/reference/apis/components/power-sensor.md index 0cd6ade9e4..81a8aca1b1 100644 --- a/docs/appendix/apis/components/power-sensor.md +++ b/docs/dev/reference/apis/components/power-sensor.md @@ -7,10 +7,12 @@ description: "Commands for getting measurements of voltage, current, and power c icon: true images: ["/icons/components/power-sensor.svg"] date: "2022-10-10" +aliases: + - /appendix/apis/components/power-sensor/ # updated: "" # When the content was last entirely checked --- -The power sensor API allows you to give commands to your [power sensor components](/components/power-sensor/) for getting measurements of voltage, current, and power consumption. +The power sensor API allows you to give commands to your [power sensor components](/operate/reference/components/power-sensor/) for getting measurements of voltage, current, and power consumption. The power sensor component supports the following methods: diff --git a/docs/appendix/apis/components/sensor.md b/docs/dev/reference/apis/components/sensor.md similarity index 93% rename from docs/appendix/apis/components/sensor.md rename to docs/dev/reference/apis/components/sensor.md index c28665efce..c258271948 100644 --- a/docs/appendix/apis/components/sensor.md +++ b/docs/dev/reference/apis/components/sensor.md @@ -7,10 +7,12 @@ description: "Commands for getting sensor readings." icon: true images: ["/icons/components/sensor.svg"] date: "2022-10-10" +aliases: + - /appendix/apis/components/sensor/ # updated: "" # When the content was last entirely checked --- -The sensor API allows you to get measurements from your [sensor components](/components/sensor/). +The sensor API allows you to get measurements from your [sensor components](/operate/reference/components/sensor/). 
The sensor component supports the following methods: diff --git a/docs/appendix/apis/components/servo.md b/docs/dev/reference/apis/components/servo.md similarity index 89% rename from docs/appendix/apis/components/servo.md rename to docs/dev/reference/apis/components/servo.md index c586b34c98..952cf7c93c 100644 --- a/docs/appendix/apis/components/servo.md +++ b/docs/dev/reference/apis/components/servo.md @@ -7,10 +7,12 @@ description: "Commands for controlling the angular position of a servo precisely icon: true images: ["/icons/components/servo.svg"] date: "2022-10-10" +aliases: + - /appendix/apis/components/servo/ # updated: "" # When the content was last entirely checked --- -The servo API allows you to give commands to your [servo components](/components/servo/) for controlling the angular position of a servo precisely or getting its current status. +The servo API allows you to give commands to your [servo components](/operate/reference/components/servo/) for controlling the angular position of a servo precisely or getting its current status. The servo component supports the following methods: diff --git a/docs/appendix/apis/data-client.md b/docs/dev/reference/apis/data-client.md similarity index 96% rename from docs/appendix/apis/data-client.md rename to docs/dev/reference/apis/data-client.md index c04d26e03a..3c229cde1f 100644 --- a/docs/appendix/apis/data-client.md +++ b/docs/dev/reference/apis/data-client.md @@ -20,6 +20,7 @@ tags: aliases: - /program/apis/data-client/ - /build/program/apis/data-client/ + - /appendix/apis/data-client/ date: "2024-09-19" # updated: "" # When the content was last entirely checked --- @@ -54,7 +55,7 @@ You will also need an API key and API key ID to authenticate your session. 
To get an API key (and corresponding ID), you have two options: - [Create an API key using the Viam app](/cloud/rbac/#add-an-api-key) -- [Create an API key using the Viam CLI](/cli/#create-an-organization-api-key) +- [Create an API key using the Viam CLI](/dev/tools/cli/#create-an-organization-api-key) The following example instantiates a `ViamClient`, authenticating with an API key, and then instantiates a `DataClient`: diff --git a/docs/appendix/apis/fleet.md b/docs/dev/reference/apis/fleet.md similarity index 94% rename from docs/appendix/apis/fleet.md rename to docs/dev/reference/apis/fleet.md index 00adfc624a..3fc9af1b49 100644 --- a/docs/appendix/apis/fleet.md +++ b/docs/dev/reference/apis/fleet.md @@ -17,13 +17,13 @@ tags: ] aliases: - /program/apis/fleet/ - /appendix/apis/fleet/ - /build/program/apis/fleet/ date: "2024-09-20" # updated: "" # When the content was last entirely checked --- -The fleet management API allows you to [manage your machine fleet](/fleet/) with code instead of with the graphical interface of the [Viam app](https://app.viam.com/). +The fleet management API allows you to manage your machine fleet with code instead of with the graphical interface of the [Viam app](https://app.viam.com/).
With it you can - create and manage organizations, locations, and individual machines diff --git a/docs/appendix/apis/ml-training-client.md b/docs/dev/reference/apis/ml-training-client.md similarity index 98% rename from docs/appendix/apis/ml-training-client.md rename to docs/dev/reference/apis/ml-training-client.md index 1401bd2168..1214132237 100644 --- a/docs/appendix/apis/ml-training-client.md +++ b/docs/dev/reference/apis/ml-training-client.md @@ -8,6 +8,7 @@ tags: ["cloud", "sdk", "viam-server", "networking", "apis", "ml model", "ml"] aliases: - /program/apis/ml_training-client/ - /build/program/apis/ml-training-client/ + - /appendix/apis/ml-training-client/ date: "2024-09-16" # updated: "" # When the content was last entirely checked --- diff --git a/docs/appendix/apis/robot.md b/docs/dev/reference/apis/robot.md similarity index 97% rename from docs/appendix/apis/robot.md rename to docs/dev/reference/apis/robot.md index 53b94bbca2..8dfe7ac2aa 100644 --- a/docs/appendix/apis/robot.md +++ b/docs/dev/reference/apis/robot.md @@ -8,11 +8,12 @@ tags: ["robot state", "sdk", "apis", "robot api"] aliases: - /program/apis/robot/ - /build/program/apis/robot/ + - /appendix/apis/robot/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- -The _machine API_ allows you to connect to your machine from within a supported [Viam SDK](/appendix/apis/), retrieve status information, and send commands remotely. +The _machine API_ allows you to connect to your machine from within a supported [Viam SDK](/dev/reference/apis/), retrieve status information, and send commands remotely. The machine API is supported for use with the [Viam Python SDK](https://python.viam.dev/autoapi/viam/robot/client/index.html#viam.robot.client.RobotClient), the [Viam Go SDK](https://pkg.go.dev/go.viam.com/rdk/robot/client#RobotClient), and the [Viam C++ SDK](https://cpp.viam.dev/classviam_1_1sdk_1_1RobotClient.html). 
@@ -24,7 +25,7 @@ The machine API supports the following methods: To interact with the machine API with Viam's SDKs, instantiate a `RobotClient` ([gRPC](https://grpc.io/) client) and use that class for all interactions. -To find the API key, API key ID, and machine address, go to [Viam app](https://app.viam.com/), select the machine you wish to connect to, and go to the [**Code sample**](/cloud/machines/#code-sample) tab. +To find the API key, API key ID, and machine address, go to [Viam app](https://app.viam.com/), select the machine you wish to connect to, and go to the **CONNECT** tab. Toggle **Include API key**, and then copy and paste the API key ID and the API key into your environment variables or directly into the code: {{< tabs >}} diff --git a/docs/appendix/apis/services/SLAM.md b/docs/dev/reference/apis/services/SLAM.md similarity index 90% rename from docs/appendix/apis/services/SLAM.md rename to docs/dev/reference/apis/services/SLAM.md index 619a882b4b..a402a31aa2 100644 --- a/docs/appendix/apis/services/SLAM.md +++ b/docs/dev/reference/apis/services/SLAM.md @@ -8,12 +8,14 @@ description: "Give commands to get a machine's position within a map." icon: true images: ["/services/icons/slam.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/services/slam/ # updated: "" # When the content was last entirely checked --- The SLAM service API allows you to get a machine's position within a map. 
-The [SLAM service](/services/slam/) supports the following methods: +The [SLAM service](/operate/reference/services/slam/) supports the following methods: {{< readfile "/static/include/services/apis/generated/slam-table.md" >}} diff --git a/docs/appendix/apis/services/_index.md b/docs/dev/reference/apis/services/_index.md similarity index 76% rename from docs/appendix/apis/services/_index.md rename to docs/dev/reference/apis/services/_index.md index fb20e2665f..3cee866623 100644 --- a/docs/appendix/apis/services/_index.md +++ b/docs/dev/reference/apis/services/_index.md @@ -4,6 +4,6 @@ title: "Service APIs" weight: 5 empty_node: true layout: "empty" -canonical: "/appendix/apis/" +canonical: "/dev/reference/apis/" type: "docs" --- diff --git a/docs/appendix/apis/services/base-rc.md b/docs/dev/reference/apis/services/base-rc.md similarity index 90% rename from docs/appendix/apis/services/base-rc.md rename to docs/dev/reference/apis/services/base-rc.md index 3f5970ccf1..c9a140bfd5 100644 --- a/docs/appendix/apis/services/base-rc.md +++ b/docs/dev/reference/apis/services/base-rc.md @@ -8,12 +8,14 @@ description: "Give commands to get a list of inputs from the controller that are icon: true images: ["/services/icons/base-rc.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/services/base-rc/ # updated: "" # When the content was last entirely checked --- The base remote control service API allows you to get a list of inputs from the controller that are being monitored for that control mode. 
-The [SLAM service](/services/slam/) supports the following methods: +The base remote control service supports the following methods: {{< readfile "/static/include/services/apis/generated/base_remote_control-table.md" >}} diff --git a/docs/appendix/apis/services/data.md b/docs/dev/reference/apis/services/data.md similarity index 85% rename from docs/appendix/apis/services/data.md rename to docs/dev/reference/apis/services/data.md index 2dc20cd47e..ebf902707a 100644 --- a/docs/appendix/apis/services/data.md +++ b/docs/dev/reference/apis/services/data.md @@ -7,17 +7,19 @@ description: "Give commands to your data management service to sync data stored icon: true images: ["/icons/components/arm.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/services/data/ # updated: "" # When the content was last entirely checked --- The data management service API allows you to sync data stored on the machine it is deployed on to the cloud. -The [data management service](/services/data/) supports the following methods: +The [data management service](/data-ai/capture-data/capture-sync/) supports the following methods: {{< readfile "/static/include/services/apis/generated/data_manager-table.md" >}} The data client API supports a separate set of methods that allow you to upload and export data to and from the Viam app. -For information about that API, see [Data Client API](/appendix/apis/data-client/). +For information about that API, see [Data Client API](/dev/reference/apis/data-client/).
## Establish a connection diff --git a/docs/appendix/apis/services/generic.md b/docs/dev/reference/apis/services/generic.md similarity index 86% rename from docs/appendix/apis/services/generic.md rename to docs/dev/reference/apis/services/generic.md index 292de7b1a6..0fe8417f8a 100644 --- a/docs/appendix/apis/services/generic.md +++ b/docs/dev/reference/apis/services/generic.md @@ -8,10 +8,12 @@ description: "Give commands to your generic components for running model-specifi icon: true images: ["/icons/components/generic.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/services/generic/ # updated: "" # When the content was last entirely checked --- -The generic service API allows you to give commands to your [generic services](/services/generic/) for running model-specific commands using [`DoCommand`](/appendix/apis/services/generic/#docommand). +The generic service API allows you to give commands to your [generic services](/operate/reference/services/generic/) for running model-specific commands using [`DoCommand`](/dev/reference/apis/services/generic/#docommand). The generic service supports the following methods: diff --git a/docs/appendix/apis/services/ml.md b/docs/dev/reference/apis/services/ml.md similarity index 94% rename from docs/appendix/apis/services/ml.md rename to docs/dev/reference/apis/services/ml.md index bea271dcb5..903caf5f08 100644 --- a/docs/appendix/apis/services/ml.md +++ b/docs/dev/reference/apis/services/ml.md @@ -8,12 +8,14 @@ description: "Give commands to your ML model service to make inferences based on icon: true images: ["/services/icons/ml.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/services/ml/ # updated: "" # When the content was last entirely checked --- The ML model service API allows you to make inferences based on a provided ML model.
-The [ML Model service](/services/ml/) supports the following methods: +The [ML Model service](/data-ai/ai/deploy/) supports the following methods: {{< readfile "/static/include/services/apis/generated/mlmodel-table.md" >}} diff --git a/docs/appendix/apis/services/motion.md b/docs/dev/reference/apis/services/motion.md similarity index 75% rename from docs/appendix/apis/services/motion.md rename to docs/dev/reference/apis/services/motion.md index cce2557dec..fee203d8cd 100644 --- a/docs/appendix/apis/services/motion.md +++ b/docs/dev/reference/apis/services/motion.md @@ -3,14 +3,14 @@ title: "Motion Service API" linkTitle: "Motion" weight: 40 type: "docs" -description: "Give commands to move a machine based on a SLAM map or GPS coordinates or to move a machine's components form one location to another." +description: "Give commands to move a machine's components from one location or pose to another." icon: true images: ["/icons/components/arm.svg"] date: "2022-01-01" # updated: "" # When the content was last entirely checked --- -The motion service API allows you to give commands to your [motion service](/services/motion/) for moving a machine based on a SLAM map or GPS coordinates or for moving a machine's components from one location to another. +The motion service API allows you to give commands to your [motion service](/operate/reference/services/motion/) for moving a mobile robot based on a SLAM map or GPS coordinates or for moving a machine's components from one pose to another. The motion service supports the following methods: @@ -25,7 +25,7 @@ To get started using Viam's SDKs to connect to and control your machine, go to y When executed, this sample code creates a connection to your machine as a client. Because the motion service is enabled by default, you don't give it a `"name"` while configuring it. 
-Use the name `"builtin"` to access the built-in motion service in your code with methods like [`FromRobot()`](/appendix/apis/services/motion/#fromrobot) that require a `ResourceName`. +Use the name `"builtin"` to access the built-in motion service in your code with methods like [`FromRobot()`](/dev/reference/apis/services/motion/#fromrobot) that require a `ResourceName`. Import the motion package for the SDK you are using: diff --git a/docs/appendix/apis/services/navigation.md b/docs/dev/reference/apis/services/navigation.md similarity index 90% rename from docs/appendix/apis/services/navigation.md rename to docs/dev/reference/apis/services/navigation.md index 558c9d18e0..a7064e8d27 100644 --- a/docs/appendix/apis/services/navigation.md +++ b/docs/dev/reference/apis/services/navigation.md @@ -8,12 +8,14 @@ description: "Give commands to define waypoints and move your machine along thos icon: true images: ["/services/icons/navigation.svg"] date: "2022-01-01" +aliases: + - /appendix/apis/services/navigation/ # updated: "" # When the content was last entirely checked --- The navigation service API allows you to define waypoints and move your machine along those waypoints while avoiding obstacles. 
-The [navigation service](/services/navigation/) supports the following methods: +The [navigation service](/operate/reference/services/navigation/) supports the following methods: {{< readfile "/static/include/services/apis/generated/navigation-table.md" >}} diff --git a/docs/services/vision/_index.md b/docs/dev/reference/apis/services/vision.md similarity index 57% rename from docs/services/vision/_index.md rename to docs/dev/reference/apis/services/vision.md index 4599b314df..692c7e6609 100644 --- a/docs/services/vision/_index.md +++ b/docs/dev/reference/apis/services/vision.md @@ -1,31 +1,29 @@ --- -title: "Vision Service" -linkTitle: "Computer Vision" +title: "Vision service API" +linkTitle: "Vision" weight: 20 type: "docs" -description: "The vision service enables your machine to use its on-board cameras to intelligently see and interpret the world around it." +tags: ["vision", "computer vision", "CV", "services"] +description: "Give commands to get detections, classifications, or point cloud objects, depending on the ML model the vision service is using." +aliases: + - /services/vision/ + - /ml/vision/detection/ + - /ml/vision/classification/ + - /ml/vision/segmentation/ + - /services/vision/segmentation/ + - /ml/vision/ + - /appendix/apis/services/vision/ icon: true images: ["/services/icons/vision.svg"] tags: ["vision", "computer vision", "CV", "services"] -no_list: true -modulescript: true -hide_children: true -aliases: - - "/services/vision/" - - "/ml/vision/detection/" - - "/ml/vision/classification/" - - "/ml/vision/segmentation/" - - "/services/vision/segmentation/" - - /ml/vision/ date: "2022-01-01" # updated: "" # When the content was last entirely checked -# SMEs: Bijan, Khari --- -The vision service enables your machine to use its on-board [cameras](/components/camera/) to intelligently see and interpret the world around it.
+The vision service enables your machine to use its on-board [cameras](/operate/reference/components/camera/) to intelligently see and interpret the world around it. While the camera component lets you access what your machine's camera sees, the vision service allows you to interpret your image data. -{{}} +The vision service API allows you to get detections, classifications, or point cloud objects, depending on the ML model the vision service is using. The vision service supports the following kinds of operations: @@ -55,8 +55,8 @@ The returned detections consist of the bounding box around the identified object **Supported API methods:** -- [GetDetections()](/appendix/apis/services/vision/#getdetections) -- [GetDetectionsFromCamera()](/appendix/apis/services/vision/#getdetectionsfromcamera) +- [GetDetections()](/dev/reference/apis/services/vision/#getdetections) +- [GetDetectionsFromCamera()](/dev/reference/apis/services/vision/#getdetectionsfromcamera) ## Classifications @@ -73,8 +73,8 @@ The returned classifications consist of the image's class label and confidence s **Supported API methods:** -- [GetClassifications()](/appendix/apis/services/vision/#getclassifications) -- [GetClassificationsFromCamera()](/appendix/apis/services/vision/#getclassificationsfromcamera) +- [GetClassifications()](/dev/reference/apis/services/vision/#getclassifications) +- [GetClassificationsFromCamera()](/dev/reference/apis/services/vision/#getclassificationsfromcamera) ## Segmentations @@ -88,26 +88,41 @@ Any camera that can return 3D pointclouds can use 3D object segmentation. 
**Supported API methods:** -- [GetObjectPointClouds()](/appendix/apis/services/vision/#getobjectpointclouds) +- [GetObjectPointClouds()](/dev/reference/apis/services/vision/#getobjectpointclouds) -## Configuration +The [vision service](/operate/reference/services/vision/) supports the following methods: -{{}} +{{< readfile "/static/include/services/apis/generated/vision-table.md" >}} -{{< readfile "/static/include/create-your-own-mr.md" >}} +## Establish a connection -## API +To get started using Viam's SDKs to connect to and control your machine, go to your machine's page on the [Viam app](https://app.viam.com), navigate to the **CONNECT** tab's **Code sample** page, select your preferred programming language, and copy the sample code. -The vision service supports the following [vision service API](/appendix/apis/services/vision/) methods: +{{% snippet "show-secret.md" %}} -{{< readfile "/static/include/services/apis/generated/vision-table.md" >}} +When executed, this sample code creates a connection to your machine as a client. -## Next Steps +The following examples assume that you have a machine configured with a [camera](/operate/reference/components/camera/) and a vision service [detector](/operate/reference/services/vision/#detections), [classifier](/operate/reference/services/vision/#classifications) or [segmenter](/operate/reference/services/vision/#segmentations). 
-For general configuration and development info, see: +{{< tabs >}} +{{% tab name="Python" %}} + +```python +from viam.services.vision import VisionClient +``` + +{{% /tab %}} +{{% tab name="Go" %}} + +```go +import ( + "go.viam.com/rdk/services/vision" +) +``` + +{{% /tab %}} +{{< /tabs >}} + +## API -{{< cards >}} -{{% card link="/how-tos/configure/" noimage="true" %}} -{{% card link="/how-tos/develop-app/" noimage="true" %}} -{{% card link="/how-tos/detect-people/" noimage="true" %}} -{{< /cards >}} +{{< readfile "/static/include/services/apis/generated/vision.md" >}} diff --git a/docs/appendix/apis/sessions.md b/docs/dev/reference/apis/sessions.md similarity index 90% rename from docs/appendix/apis/sessions.md rename to docs/dev/reference/apis/sessions.md index c6cb319e5e..debd3b219b 100644 --- a/docs/appendix/apis/sessions.md +++ b/docs/dev/reference/apis/sessions.md @@ -19,6 +19,7 @@ tags: aliases: - /program/apis/sessions/ - /build/program/apis/sessions/ + - /appendix/apis/sessions/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- @@ -29,10 +30,10 @@ The period of time during which a client is connected to a machine is called a _ _Session management_ is a safety precaution that allows you to manage the clients that are authenticated and communicating with a machine's `viam-server` instance. The default session management configuration checks for presence to ensures that a machine only moves when a client is actively connected and stops any components that remain running when a client disconnects. This is especially important for machines that physically move. -For example, imagine a wheeled rover gets a [`SetPower()`](/appendix/apis/components/base/#setpower) command as the last input from a client before the connection to the machine is interrupted. 
+For example, imagine a wheeled rover gets a [`SetPower()`](/dev/reference/apis/components/base/#setpower) command as the last input from a client before the connection to the machine is interrupted. Without session management, the API request from the client would cause the rover's motors to move, causing the machine to continue driving forever and potentially colliding with objects and people. -For more information, see [Client Sessions and Machine Network Connectivity](/sdks/connectivity/). +For more information, see [Client Sessions and Machine Network Connectivity](/dev/reference/sdks/connectivity/). If you want to manage operations differently, you can manage your machine's client sessions yourself. The Session Management API provides functionality for: @@ -65,7 +66,7 @@ The [Session Management API](https://pkg.go.dev/go.viam.com/rdk/session) is not Use the Go Client SDK instead. {{< alert title="Tip" color="tip" >}} -If you are looking to implement session management yourself only to increase the session window, you can increase the session window instead, by [increasing the `heartbeat_window`](/configure/#network). +If you are looking to implement session management yourself only to increase the session window, you can increase the session window instead, by increasing the `heartbeat_window` in the network configuration. {{< /alert >}} To manage your session with the session management API: @@ -76,7 +77,7 @@ To manage your session with the session management API: ### Disable default session management The `SessionsClient` that serves the session management API is automatically enabled on your machine. -It is instantiated as part of your [`RobotClient`](/appendix/apis/robot/) instance (client of the Machine API). +It is instantiated as part of your [`RobotClient`](/dev/reference/apis/robot/) instance (client of the Machine API). 
If you want to disable it, you can pass the option to your machine, as demonstrated in the following code snippets: {{< tabs >}} @@ -126,6 +127,6 @@ You can do this with Viam's client SDKs. ### Use the session management API to manually manage sessions -Use your [`RobotClient()`](/appendix/apis/robot/) instance to access the [`SessionsClient`](https://pkg.go.dev/go.viam.com/rdk/session) within your client SDK program. +Use your [`RobotClient()`](/dev/reference/apis/robot/) instance to access the [`SessionsClient`](https://pkg.go.dev/go.viam.com/rdk/session) within your client SDK program. This is a [gRPC](https://grpc.io/) client that `viam-server` instantiates at robot runtime. Then, define your own [`SessionsClient`](https://github.com/viamrobotics/rdk/blob/main/robot/client/client.go). diff --git a/docs/appendix/changelog.md b/docs/dev/reference/changelog.md similarity index 80% rename from docs/appendix/changelog.md rename to docs/dev/reference/changelog.md index 0545987f7b..a322e48039 100644 --- a/docs/appendix/changelog.md +++ b/docs/dev/reference/changelog.md @@ -30,6 +30,7 @@ aliases: - "/components/sensor/bme280/" - "/components/sensor/ds18b20/" - "/components/sensor/sensirion-sht3xd/" + - /appendix/changelog/ layout: "changelog" outputs: - rss @@ -87,13 +88,13 @@ The following models were removed: {{% /changelog %}} {{% changelog date="2024-11-05" color="added" title="MoveThroughJointPositions to arm interface" %}} -The [arm interface](/appendix/apis/components/arm/) now includes a [MoveThroughJointPositions](https://pkg.go.dev/go.viam.com/rdk/components/arm#Arm) method that moves an arm through an ordered array of joint positions. +The [arm interface](/dev/reference/apis/components/arm/) now includes a [MoveThroughJointPositions](https://pkg.go.dev/go.viam.com/rdk/components/arm#Arm) method that moves an arm through an ordered array of joint positions. 
{{% /changelog %}} {{% changelog date="2024-10-16" color="added" title="Set data retention policies" %}} You can now set how long data collected by a component should remain stored in the Viam Cloud in the component's data capture configuration. -For more information, see [Data management service](/services/data/). +For more information, see [Data management service](/data-ai/capture-data/capture-sync/). {{% /changelog %}} @@ -106,75 +107,75 @@ The Raspberry Pi 4, 3, and Zero 2 W boards are now supported by [`viam:raspberry {{% changelog date="2024-08-26" color="added" title="ESP32 cameras" %}} `viam-micro-server` now supports cameras on ESP32s. -For more information, see [Configure an esp32-camera](/components/camera/esp32-camera/). +For more information, see [Configure an esp32-camera](/operate/reference/components/camera/esp32-camera/). {{% /changelog %}} {{% changelog date="2024-08-26" color="changed" title="Micro-RDK now called viam-micro-server" %}} The lightweight version of `viam-server` that is built from the micro-RDK is now referred to as `viam-micro-server`. -For more information, see [viam-micro-server](/architecture/viam-micro-server/). +For more information, see [viam-micro-server](/operate/reference/viam-micro-server/). {{% /changelog %}} {{% changelog date="2024-08-26" color="added" title="Provisioning" %}} You can now configure provisioning for machines with the Viam Agent. -For more information, see [Configure provisioning with viam-agent](/how-tos/provision-setup/). +For more information, see [Configure provisioning with viam-agent](/manage/fleet/provision/setup/). {{% /changelog %}} {{% changelog date="2024-08-16" color="added" title="Data capture for vision" %}} Data capture is now possible for the vision service. -For more information, see [Supported components and services](/services/data/#supported-components-and-services). 
+For more information, see [Supported components and services](/data-ai/capture-data/capture-sync/#supported-resources). {{% /changelog %}} {{% changelog date="2024-08-01" color="added" title="Create custom training scripts" %}} You can now upload custom training scripts to the Viam Registry and use them to train machine learning models. -For more information, see [Create custom training scripts](/how-tos/create-custom-training-scripts/). +For more information, see [Create custom training scripts](/data-ai/ai/train/). {{% /changelog %}} {{% changelog date="2024-07-19" color="changed" title="Operators can now view data" %}} The operator role now has view permissions for the data in the respective resource a user has access to. -For more information, see [Data and machine learning permissions](/cloud/rbac/#data-and-machine-learning). +For more information, see [Data and machine learning permissions](/manage/manage/rbac/#data-and-machine-learning). {{% /changelog %}} {{% changelog date="2024-06-14" color="changed" title="Python get_robot_part_logs parameters" %}} -The `errors_only` parameter has been removed from [`get_robot_part_logs()`](/appendix/apis/fleet/#getrobotpartlogs) and replaced with `log_levels`. +The `errors_only` parameter has been removed from [`get_robot_part_logs()`](/dev/reference/apis/fleet/#getrobotpartlogs) and replaced with `log_levels`. {{% /changelog %}} {{% changelog date="2024-05-28" color="changed" title="Return type of analog Read" %}} -The board analog API [`Read()`](/appendix/apis/components/board/#readanalogreader) method now returns an `AnalogValue` struct instead of a single int. +The board analog API [`Read()`](/dev/reference/apis/components/board/#readanalogreader) method now returns an `AnalogValue` struct instead of a single int. The struct contains an int representing the value of the reading, min and max range of values, and the precision of the reading. 
{{% /changelog %}} {{% changelog date="2024-05-28" color="added" title="CaptureAllFromCamera and GetProperties to vision API" %}} -The vision service now supports two new methods: [`CaptureAllFromCamera`](/appendix/apis/services/vision/#captureallfromcamera) and [`GetProperties`](/appendix/apis/services/vision/#getproperties). +The vision service now supports two new methods: [`CaptureAllFromCamera`](/dev/reference/apis/services/vision/#captureallfromcamera) and [`GetProperties`](/dev/reference/apis/services/vision/#getproperties). {{% /changelog %}} {{% changelog date="2024-05-14" color="changed" title="Renamed GeoObstacle to GeoGeometry" %}} The motion service API parameter `GeoObstacle` has been renamed to `GeoGeometry`. -This affects users of the [`MoveOnGlobe()`](/appendix/apis/services/motion/#moveonglobe) method. +This affects users of the [`MoveOnGlobe()`](/dev/reference/apis/services/motion/#moveonglobe) method. {{% /changelog %}} {{< changelog date="2024-05-09" color="changed" title="Return type of GetImage" >}} The Python SDK introduced a new image container class called [`ViamImage`](https://python.viam.dev/autoapi/viam/components/camera/index.html#viam.components.camera.ViamImage). -The camera component's [`GetImage()`](/appendix/apis/components/camera/#getimage) method now returns a `ViamImage` type, and the vision service's [`GetDetections()`](/appendix/apis/services/vision/#getdetections) and [`GetClassifications()`](/appendix/apis/services/vision/#getclassifications) methods take in `ViamImage` as a parameter. +The camera component's [`GetImage()`](/dev/reference/apis/components/camera/#getimage) method now returns a `ViamImage` type, and the vision service's [`GetDetections()`](/dev/reference/apis/services/vision/#getdetections) and [`GetClassifications()`](/dev/reference/apis/services/vision/#getclassifications) methods take in `ViamImage` as a parameter. 
You can use the helper functions `viam_to_pil_image` and `pil_to_viam_image` provided by the Python SDK to convert the `ViamImage` into a [`PIL Image`](https://omz-software.com/pythonista/docs/ios/Image.html) and vice versa. @@ -207,13 +208,13 @@ detections = await detector.get_detections(cropped_frame) {{% changelog date="2024-05-08" color="removed" title="WriteAnalog from Go SDK" %}} The `WriteAnalog()` method has been removed from the Go SDK. -Use [`AnalogByName()`](/appendix/apis/components/board/#analogbyname) followed by [`Write()`](/appendix/apis/components/board/#writeanalog) instead. +Use [`AnalogByName()`](/dev/reference/apis/components/board/#analogbyname) followed by [`Write()`](/dev/reference/apis/components/board/#writeanalog) instead. {{% /changelog %}} {{% changelog date="2024-04-30" color="changed" title="Python SDK data retrieval behavior" %}} -[`tabular_data_by_filter()`](/appendix/apis/data-client/#tabulardatabyfilter) and [`binary_data_by_filter()`](/appendix/apis/data-client/#binarydatabyfilter) now return paginated data. +[`tabular_data_by_filter()`](/dev/reference/apis/data-client/#tabulardatabyfilter) and [`binary_data_by_filter()`](/dev/reference/apis/data-client/#binarydatabyfilter) now return paginated data. {{% /changelog %}} @@ -221,13 +222,13 @@ Use [`AnalogByName()`](/appendix/apis/components/board/#analogbyname) followed b `AnalogReader` has been renamed to `Analog`. The functionality remains the same, but code that uses analogs must be updated. -`AnalogReaderByName()` and `AnalogReaderNames()` have become [`AnalogByName()`](/appendix/apis/components/board/#analogbyname) and `AnalogNames()` (since deprecated), respectively. +`AnalogReaderByName()` and `AnalogReaderNames()` have become [`AnalogByName()`](/dev/reference/apis/components/board/#analogbyname) and `AnalogNames()` (since deprecated), respectively. 
{{% /changelog %}}

{{% changelog date="2024-04-30" color="added" title="Part online and part offline triggers" %}}

-You can now configure [triggers](/configure/triggers/) to execute actions when a {{< glossary_tooltip term_id="part" text="machine part" >}} comes online or goes offline.
+You can now configure [triggers](/manage/troubleshoot/alert/) to execute actions when a {{< glossary_tooltip term_id="part" text="machine part" >}} comes online or goes offline.

{{% /changelog %}}

@@ -245,7 +246,7 @@ Viam has removed support for following builtin camera models: `single_stream`, `

{{% changelog date="2024-04-17" color="changed" title="Updated GetCloudMetadata response" %}}

-In addition to the existing returned metadata, the [`GetCloudMetadata`](/appendix/apis/robot/#getcloudmetadata) method now returns `machine_id` and `machine_part_id` as well.
+In addition to the existing returned metadata, the [`GetCloudMetadata`](/dev/reference/apis/robot/#getcloudmetadata) method now returns `machine_id` and `machine_part_id` as well.

{{% /changelog %}}

@@ -259,19 +260,19 @@ In addition to other improvements, your component, service, and other resource c

{{% changelog date="2024-03-01" color="added" title="Additional ML models" %}}

Viam has added support for the TensorFlow, PyTorch, and ONNX ML model frameworks, expanding upon the existing support for TensorFlow Lite models.
-You can now upload your own ML model(/registry/ml-models/) using any of these frameworks for use with the Vision service.
+You can now upload your own [ML model](/data-ai/ai/deploy/#deploy-your-ml-model) using any of these frameworks for use with the Vision service.
{{% /changelog %}}

{{% changelog date="2024-03-01" color="added" title="Ultrasonic sensor for `viam-micro-server`" %}}

-You can now use the [ultrasonic sensor component](/components/sensor/ultrasonic-micro-rdk/) with [`viam-micro-server`](/installation/viam-micro-server-setup/#install-viam-micro-server) to integrate an [HC-S204](https://www.sparkfun.com/products/15569) ultrasonic distance sensor into a machine running `viam-micro-server`.
+You can now use the [ultrasonic sensor component](/operate/reference/components/sensor/ultrasonic-micro-rdk/) with [`viam-micro-server`](/operate/reference/viam-micro-server/) to integrate an [HC-SR04](https://www.sparkfun.com/products/15569) ultrasonic distance sensor into a machine running `viam-micro-server`.

{{% /changelog %}}

{{% changelog date="2024-03-01" color="added" title="Edit a machine configuration that uses a fragment" %}}

-You can now edit the configuration of an existing machine that has been configured with a fragment by using [the `fragment_mods` object](/fleet/fragments/#modify-the-config-of-a-machine-that-uses-a-fragment) in your configuration.
+You can now edit the configuration of an existing machine that has been configured with a fragment by using [the `fragment_mods` object](/manage/fleet/reuse-configuration/#modify-fragment-settings-on-a-machine) in your configuration.

You can use the `fragment_mods` objects to be able to deploy a fragment to a fleet of machines, but still be able to make additional per-machine edits as needed.

{{% /changelog %}}

@@ -285,20 +286,20 @@ The dual GPS movement sensor calculates a compass heading from both GPS sensors,

{{% changelog date="2024-03-01" color="added" title="Viam Agent" %}}

-You can now use the [Viam Agent](/fleet/provision/) to provision your machine or fleet of machines during deployment.
+You can now use the [Viam Agent](/manage/reference/viam-agent/) to provision your machine or fleet of machines during deployment.
The Viam Agent is a software provisioning manager that you can install on your machine which manages your `viam-server` installation, including installation and ongoing updates, as well as providing flexible deployment configuration options, such as pre-configured WiFi network credentials. {{% /changelog %}} {{% changelog date="2024-02-12" color="added" title="Generic service" %}} -You can now use the [generic service](/services/generic/) to define new, unique types of services that do not already have an [appropriate API](/appendix/apis/#service-apis) defined for them. +You can now use the [generic service](/operate/reference/components/generic/) to define new, unique types of services that do not already have an [appropriate API](/dev/reference/apis/#service-apis) defined for them. {{% /changelog %}} {{% changelog date="2024-02-12" color="added" title="ML models in the registry" %}} -You can now upload [machine learning (ML) models](/registry/ml-models/) to the Viam Registry, in addition to modules. +You can now upload [machine learning (ML) models](/data-ai/ai/deploy/#deploy-your-ml-model) to the Viam Registry, in addition to modules. You may upload models you have trained yourself using the Viam app, or models you have trained outside of the App. When uploading, you have the option to make your model available to the general public for reuse. @@ -306,14 +307,14 @@ When uploading, you have the option to make your model available to the general {{% changelog date="2024-01-31" color="added" title="Sensor-controlled base" %}} -Viam has added a [sensor-controlled base](/components/base/sensor-controlled/) component model, which supports a robotic base that receives feedback control from a movement sensor. +Viam has added a [sensor-controlled base](/operate/reference/components/base/sensor-controlled/) component model, which supports a robotic base that receives feedback control from a movement sensor. 
{{% /changelog %}} {{% changelog date="2024-01-31" color="added" title="Visualize captured data" %}} -You can now [visualize your data](/how-tos/sensor-data-visualize/) using many popular third-party visualization tools, including Grafana, Tableau, Google’s Looker Studio, and more. -You can visualize any data, such as sensor readings, that you have [synced](/services/data/) to the Viam app from your machine. +You can now [visualize your data](/data-ai/data/visualize/) using many popular third-party visualization tools, including Grafana, Tableau, Google’s Looker Studio, and more. +You can visualize any data, such as sensor readings, that you have [synced](/data-ai/capture-data/capture-sync/) to the Viam app from your machine. See [Visualize data with Grafana](/tutorials/services/visualize-data-grafana/) for a full walkthrough focused on Grafana specifically. @@ -321,7 +322,7 @@ See [Visualize data with Grafana](/tutorials/services/visualize-data-grafana/) f {{% changelog date="2024-01-31" color="added" title="Use triggers to trigger actions" %}} -You can now configure [triggers](/configure/triggers/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud. +You can now configure [triggers](/data-ai/data/advanced/alert-data/) (previously called webhooks) to execute actions when certain types of data are sent from your machine to the cloud. {{% /changelog %}} @@ -330,19 +331,19 @@ You can now configure [triggers](/configure/triggers/) (previously called webhoo Viam has added a [`filtered-camera` module](https://app.viam.com/module/erh/filtered-camera) that selectively captures and syncs only the images that match the detections of an ML model. For example, you could train an ML model that is focused on sports cars, and only capture images from the camera feed when a sports car is detected in the frame. -Check out [this guide](/how-tos/image-data/) for more information. 
+Check out [this guide](/data-ai/capture-data/filter-before-sync/) for more information. {{% /changelog %}} {{% changelog date="2023-12-31" color="added" title="Raspberry Pi 5 Support" %}} -You can now run `viam-server` on a [Raspberry Pi 5](/components/board/pi5/) with the new board model [`pi5`](/components/board/pi5/). +You can now run `viam-server` on a [Raspberry Pi 5](/operate/reference/components/board/pi5/) with the new board model [`pi5`](/operate/reference/components/board/pi5/). {{% /changelog %}} {{% changelog date="2023-12-31" color="added" title="Role-based access control" %}} -Users can now have [access to different fleet management capabilities](/cloud/rbac/) depending on whether they are an owner or an operator of a given organization, location, or machine. +Users can now have [access to different fleet management capabilities](/manage/manage/rbac/) depending on whether they are an owner or an operator of a given organization, location, or machine. {{% /changelog %}} @@ -358,20 +359,20 @@ Location secrets, the previous method of authentication, is deprecated and will Once you have added the data management service and synced data, such as sensor readings, to the Viam app, you can now run queries against both captured data as well as its metadata using either SQL or MQL. -For more information, see [Query Data with SQL or MQL](/how-tos/sensor-data-query-with-third-party-tools/). +For more information, see [Query Data with SQL or MQL](/data-ai/data/query/). {{% /changelog %}} {{% changelog date="2023-11-30" color="changed" title="Model training from datasets" %}} -To make it easier to iterate while training machine learning models from image data, you now train models from [datasets](/fleet/dataset/). +To make it easier to iterate while training machine learning models from image data, you now train models from [datasets](/data-ai/ai/create-dataset/). 
{{% /changelog %}} {{% changelog date="2023-11-30" color="improved" title="Manage users access" %}} You can now manage users access to machines, locations, and organizations. -For more information, see [Access Control](/cloud/rbac/) +For more information, see [Access Control](/manage/manage/rbac/) {{% /changelog %}} @@ -399,7 +400,7 @@ Read the [Viam PLC Support](https://www.viam.com/post/viam-plc-support-democrati {{% changelog date="2023-10-31" color="improved" title="SLAM map creation" %}} -The [Cartographer-module](/services/slam/cartographer/) now runs in Viam's cloud for creating or updating maps. +The [Cartographer-module](/operate/reference/services/slam/cartographer/) now runs in Viam's cloud for creating or updating maps. This enhancement allows you to: - Generate larger maps without encountering session timeouts @@ -412,7 +413,7 @@ This enhancement allows you to: {{% changelog date="2023-09-30" color="added" title="Modular registry" %}} -The [Modular Registry](/registry/) enables you to use, create, and share custom modules, extending the capabilities of Viam beyond the components and services that are natively supported. +The [Modular Registry](https://app.viam.com/registry/) enables you to use, create, and share custom modules, extending the capabilities of Viam beyond the components and services that are natively supported. You can: @@ -424,18 +425,18 @@ You can: {{% changelog date="2023-09-30" color="added" title="Mobile app" %}} -You can use a [mobile application](/fleet/control/#control-interface-in-the-viam-mobile-app), available for download now in the [Apple](https://apps.apple.com/us/app/viam-robotics/id6451424162) and [Google Play](https://play.google.com/store/apps/details?id=com.viam.viammobile&hl=en&gl=US) app stores, to connect to and control your Viam-powered machines directly from your mobile device. 
+You can use a [mobile application](/manage/troubleshoot/teleoperate/default-interface/#viam-mobile-app), available for download now in the [Apple](https://apps.apple.com/us/app/viam-robotics/id6451424162) and [Google Play](https://play.google.com/store/apps/details?id=com.viam.viammobile&hl=en&gl=US) app stores, to connect to and control your Viam-powered machines directly from your mobile device. {{% /changelog %}} {{% changelog date="2023-09-30" color="added" title="Power sensor component" %}} -You now have the capability to use a [power sensor component](/components/power-sensor/) to monitor the voltage, current, and power consumption within your machine's system. +You now have the capability to use a [power sensor component](/operate/reference/components/power-sensor/) to monitor the voltage, current, and power consumption within your machine's system. {{% /changelog %}} {{% changelog date="2023-09-30" color="added" title="Filter component’s data before the cloud" %}} -Viam has written a module that allows you to filter data based on specific criteria before syncing it to [Viam's cloud](/services/data/). +Viam has written a module that allows you to filter data based on specific criteria before syncing it to [Viam's cloud](/data-ai/capture-data/capture-sync/). It equips machines to: - Remove data that is not of interest @@ -487,7 +488,7 @@ Note that currently, accounts from different SSO providers are treated separatel {{% changelog date="2023-07-31" color="improved" title="Arm component API" %}} -Arm models now support the [`GetKinematics` method](/appendix/apis/components/arm/#getkinematics) in the arm API, allowing you to request and receive kinematic information. +Arm models now support the [`GetKinematics` method](/dev/reference/apis/components/arm/#getkinematics) in the arm API, allowing you to request and receive kinematic information. 
{{% /changelog %}} @@ -502,8 +503,8 @@ If you depend on sensor data to plan and control machine operations, this featur The Python SDK now includes sessions, a safety feature that automatically cancels operations if the client loses connection to your machine. -[Session management](/appendix/apis/sessions/) helps you to ensure safer operation of your machine when dealing with actuating controls. -Sessions are enabled by default, with the option to [disable sessions](/appendix/apis/sessions/#disable-default-session-management). +[Session management](/dev/reference/apis/sessions/) helps you to ensure safer operation of your machine when dealing with actuating controls. +Sessions are enabled by default, with the option to [disable sessions](/dev/reference/apis/sessions/#disable-default-session-management). {{% /changelog %}} @@ -518,15 +519,13 @@ See the [Odrive module readme](https://github.com/viamrobotics/odrive) to learn {{% changelog date="2023-06-30" color="added" title="Implement custom robotic arms as Viam modules" %}} When prototyping a robotic arm, you can now facilitate movement without creating your own motion planning. -This update enables you to implement custom models of an arm component as a [modular resource](/registry/) by coding three endpoints of the [Arm API](/appendix/apis/components/arm/#api): +This update enables you to implement custom models of an arm component as a modular resource by coding three endpoints of the [Arm API](/dev/reference/apis/components/arm/#api): - `getJointPositions` - `movetoJointPositions` - `GetKinematics` -Then, use the [motion planning service](/services/motion/) to specify poses, and Viam handles the rest. - -For more information, see this [tutorial on creating a custom arm](/registry/examples/custom-arm/). +Then, use the [motion planning service](/operate/reference/services/motion/) to specify poses, and Viam handles the rest. 
{{% /changelog %}}

@@ -534,7 +533,7 @@ For more information, see this [tutorial on creating a custom arm](/registry/exa

To better control gantries with Viam, you can now:

-- Specify speed values when calling the `MovetoPosition` method on [Gantry components](/components/gantry/).
+- Specify speed values when calling the `MovetoPosition` method on [Gantry components](/operate/reference/components/gantry/).
  This allows you to define the speed at which each axis moves to the desired position, providing enhanced precision and control over the gantry's movement.

- Set a home position for Gantry components to facilitate position resetting or maintain consistent starting points.

@@ -542,7 +541,7 @@ To better control gantries with Viam, you can now:

{{% changelog date="2023-06-30" color="improved" title="Optimized Viam-trained object detection models" %}}

-This update for object detection models [trained with the machine learning service](/how-tos/train-deploy-ml/) brings significant improvements, including:
+This update for TFlite object detection models [trained with the machine learning service](/data-ai/ai/train-tflite/) brings significant improvements, including:

- 76% faster model inference for camera streams
- 64% quicker model training for object detection

@@ -558,7 +557,7 @@ The beta release of the [TypeScript SDK](https://github.com/viamrobotics/viam-ty

{{% changelog date="2023-05-31" color="added" title="Train object detection ML models" %}}

-You now have the capability to directly [train object detection models](/how-tos/train-deploy-ml/) in addition to image classification models from within the Viam app.
+You now have the capability to directly [train TFlite object detection models](/data-ai/ai/train-tflite/) in addition to image classification models from within the Viam app.
This update allows you to: @@ -571,13 +570,13 @@ This update allows you to: Now when you invite collaborators to join your organization, you can assign permissions to members by setting one of these roles: -- **Owner**: These members can see and edit [every tab on the machine page](/cloud/machines/#navigating-the-machine-page), as well as manage users in the app. +- **Owner**: These members can see and edit every tab on the machine page, as well as manage users in the app. This role is best for those on your team who are actively engineering and building machines. -- **Operator**: These members can only see and use the [remote control tab](/fleet/control/). +- **Operator**: These members can only see and use the [remote control tab](/manage/troubleshoot/teleoperate/default-interface/). This role is best for those on your team who are teleoperating or remotely controlling machines. -For more information about assigning permissions and collaborating with others on Viam, see [Fleet Management](/fleet/). +For more information about assigning permissions and collaborating with others on Viam, see [Manage access](/manage/manage/access/). {{% /changelog %}} @@ -605,7 +604,7 @@ The latest updates enable you to: The updated code samples now includes: - Options for C++ and TypeScript -- The ability to hide or display your machines' [secrets](/appendix/apis/) +- The ability to hide or display your machines' [secrets](/dev/reference/apis/) Access these samples in the **Code sample** tab on your machine's page to connect to your machine in various languages. 
@@ -621,7 +620,7 @@ You can manage the data synced to Viam's cloud with the new capability for bulk {{% alert title="Important: Breaking Change" color="note" %}} -The [vision service](/services/vision/) became more modular in RDK [v0.2.36](https://github.com/viamrobotics/rdk/releases/tag/v0.2.36), API [v0.1.118](https://github.com/viamrobotics/api/releases/tag/v0.1.118), and Python SDK [v0.2.18](https://github.com/viamrobotics/viam-python-sdk/releases/tag/v0.2.18). +The [vision service](/operate/reference/services/vision/) became more modular in RDK [v0.2.36](https://github.com/viamrobotics/rdk/releases/tag/v0.2.36), API [v0.1.118](https://github.com/viamrobotics/api/releases/tag/v0.1.118), and Python SDK [v0.2.18](https://github.com/viamrobotics/viam-python-sdk/releases/tag/v0.2.18). Find more information on each of the changes below. @@ -665,7 +664,7 @@ detections = await vision.get_detections(img, "find_objects") #### Color detector configurations -You can replace existing color detectors by [configuring new ones in the UI](/services/vision/color_detector/) or you can update the [JSON configuration of your machines](/configure/#the-configure-tab): +You can replace existing color detectors by [configuring new ones in the UI](/operate/reference/services/vision/color_detector/) or you can update the JSON configuration of your machines: {{< tabs >}} {{% tab name="New Way" %}} @@ -740,7 +739,7 @@ You can replace existing color detectors by [configuring new ones in the UI](/se #### TFLite detector configurations -You can replace existing TFLite detectors by [configuring new ones in the UI](/services/vision/mlmodel/) or you can update the [JSON configuration of your machines](/configure/#the-configure-tab): +You can replace existing TFLite detectors by [configuring new ones in the UI](/operate/reference/services/vision/mlmodel/) or you can update the JSON configuration of your machines: {{< tabs >}} {{% tab name="New Way" %}} @@ -800,7 +799,7 @@ You can replace 
existing TFLite detectors by [configuring new ones in the UI](/s #### TFLite Classifier configurations -You can replace existing TFLite classifiers by [configuring new ones in the UI](/services/vision/mlmodel/) or you can update the [JSON configuration of your machines](/configure/#the-configure-tab): +You can replace existing TFLite classifiers by [configuring new ones in the UI](/operate/reference/services/vision/mlmodel/) or you can update the JSON configuration of your machines: {{< tabs >}} {{% tab name="New Way" %}} @@ -860,7 +859,7 @@ You can replace existing TFLite classifiers by [configuring new ones in the UI]( #### Radius Clustering 3D segmenter configurations -You can replace existing Radius Clustering 3D segmenters by [configuring new ones in the UI](/services/vision/obstacles_pointcloud/) or you can update the [JSON configuration of your machines](/configure/#the-configure-tab): +You can replace existing Radius Clustering 3D segmenters by [configuring new ones in the UI](/operate/reference/services/vision/obstacles_pointcloud/) or you can update the JSON configuration of your machines: {{< tabs >}} {{% tab name="New Way" %}} @@ -914,7 +913,7 @@ You can replace existing Radius Clustering 3D segmenters by [configuring new one #### Detector to 3D segmenter configurations -You can replace existing Radius Clustering 3D segmenters by [configuring new ones in the UI](/services/vision/detector_3d_segmenter/) or you can update the [JSON configuration of your machines](/configure/#the-configure-tab): +You can replace existing Radius Clustering 3D segmenters by [configuring new ones in the UI](/operate/reference/services/vision/detector_3d_segmenter/) or you can update the JSON configuration of your machines: {{< tabs >}} {{% tab name="New Way" %}} @@ -975,21 +974,21 @@ You will no longer be able to add or remove models using the SDKs. #### Add machine learning vision models to a vision service The way to add machine learning vision models is changing. 
-You will need to first register the machine learning model file with the [ML model service](/services/ml/) and then add that registered model to a vision service.
+You will need to first register the machine learning model file with the [ML model service](/data-ai/ai/deploy/) and then add that registered model to a vision service.

{{% /changelog %}}

{{% changelog date="2023-03-31" color="added" title="Machine learning for image classification models" %}}

-You can now [train](/how-tos/train-deploy-ml/) and [deploy](/services/ml/) image classification models with the [data management service](/services/data/) and use your machine's image data directly within Viam.
-Additionally, you can upload and use existing [machine learning models](/registry/ml-models/) with your machines.
-For more information on using data synced to the cloud to train machine learning models, read [Train a model](/how-tos/train-deploy-ml/).
+You can now [train](/data-ai/ai/train-tflite/) and [deploy](/data-ai/ai/deploy/) image classification models with the [data management service](/data-ai/capture-data/capture-sync/) and use your machine's image data directly within Viam.
+Additionally, you can upload and use existing [machine learning models](/data-ai/ai/deploy/#deploy-your-ml-model) with your machines.
+For more information on using data synced to the cloud to train machine learning models, read [train a TFLite model](/data-ai/ai/train-tflite/) or [another model](/data-ai/ai/train/).

{{% /changelog %}}

{{% changelog date="2023-03-31" color="added" title="Motion planning with new `constraint` parameter" %}}

-A new parameter, [`constraint`](/services/motion/constraints/), has been added to the [Motion service API](/appendix/apis/services/motion/#api), allowing you to define restrictions on the machine's movement.
+A new parameter, [`constraint`](/operate/reference/services/motion/constraints/), has been added to the [Motion service API](/dev/reference/apis/services/motion/#api), allowing you to define restrictions on the machine's movement.
The constraint system also provides flexibility to specify that obstacles should only impact specific frames of a machine.

{{% /changelog %}}

@@ -1050,20 +1049,20 @@ Find more information in the [TypeScript SDK docs](https://ts.viam.dev/).

{{% changelog date="2023-02-28" color="added" title="Frame system visualizer" %}}

-When adding [frames](/services/frame-system/) to your machine's config in the Viam app, you can now use the **Frame System** subtab of the **CONFIGURE** tab to more easily visualize the relative positions of frames.
+When adding [frames](/operate/mobility/define-geometry/) to your machine's config in the Viam app, you can now use the **Frame System** subtab of the **CONFIGURE** tab to more easily visualize the relative positions of frames.

{{% /changelog %}}

{{% changelog date="2023-02-28" color="added" title="Support for microcontrollers" %}}

`viam-micro-server` is a lightweight version of `viam-server` that can run on an ESP32.
-Find more information in the [`viam-micro-server` installation docs](/installation/viam-micro-server-setup/#install-viam-micro-server).
+Find more information in the [`viam-micro-server` docs](/operate/reference/viam-micro-server/).

{{% /changelog %}}

{{% changelog date="2023-01-31" color="added" title="Remote control power input" %}}

-On your machine's **CONTROL** tab on the [Viam app](https://app.viam.com/), you can now set the power of a [base](/components/base/).
+On your machine's **CONTROL** tab on the [Viam app](https://app.viam.com/), you can now set the power of a [base](/operate/reference/components/base/).
The base control UI previously always sent 100% power to the base's motors.

{{% /changelog %}}

@@ -1076,13 +1075,13 @@ The [AMS AS5048](https://github.com/viam-modules/ams) is now supported. 
{{% changelog date="2023-01-31" color="added" title="GetLinearAcceleration method" %}} -The movement sensor API now includes a [GetLinearAcceleration](/appendix/apis/components/movement-sensor/#getlinearacceleration) method. +The movement sensor API now includes a [GetLinearAcceleration](/dev/reference/apis/components/movement-sensor/#getlinearacceleration) method. {{% /changelog %}} {{% changelog date="2023-01-31" color="added" title="Support for capsule geometry" %}} -The [motion service](/services/motion/) now supports capsule geometries. +The [motion service](/operate/reference/services/motion/) now supports capsule geometries. The UR5 arm model has been improved using this new geometry type. @@ -1090,7 +1089,7 @@ The UR5 arm model has been improved using this new geometry type. {{% changelog date="2022-12-28" color="added" title="Modular resources" %}} -You can now implement your own custom {{< glossary_tooltip term_id="resource" text="resources" >}} as [_modular resources_](/registry/). +You can now implement your own custom {{< glossary_tooltip term_id="resource" text="resources" >}} as [_modular resources_ in the registry](https://app.viam.com/registry/). {{% alert title="Important: Breaking Change" color="note" %}} @@ -1126,7 +1125,7 @@ There are two new movement sensor {{< glossary_tooltip term_id="model" text="mod {{% changelog date="2022-12-28" color="improved" title="Motion planning with remote components" %}} -The [motion service](/services/motion/) is now agnostic to the networking topology of a machine. +The [motion service](/operate/reference/services/motion/) is now agnostic to the networking topology of a machine. - Kinematic information is now transferred over the robot API. This means that the motion service is able to get kinematic information for every component on the machine, regardless of whether it is on a main or remote viam-server. 
@@ -1158,13 +1157,13 @@ Now is uses a simpler unary approach that is more performant on batched unary ca - Transform - Join pointclouds -For information on configuring any camera model, see [Camera Component](/components/camera/). +For information on configuring any camera model, see [Camera Component](/operate/reference/components/camera/). {{% /changelog %}} {{% changelog date="2022-11-15" color="added" title="New servo model" %}} -A new [servo model called `gpio`](/components/servo/gpio/) supports servos connected to non-Raspberry Pi boards. +A new [servo model called `gpio`](/operate/reference/components/servo/gpio/) supports servos connected to non-Raspberry Pi boards. {{% /changelog %}} @@ -1212,7 +1211,7 @@ Other service configurations are not affected. {{% changelog date="2022-11-15" color="removed" title="Width and height fields from camera API" %}} -Removed `width` and `height` from the response of the [`GetImage`](/appendix/apis/components/camera/#getimage) method in the camera API. +Removed `width` and `height` from the response of the [`GetImage`](/dev/reference/apis/components/camera/#getimage) method in the camera API. This does not impact any existing camera models. If you write a custom camera model, you no longer need to implement the `width` and `height` fields. diff --git a/docs/appendix/glossary/api-namespace-triplet.md b/docs/dev/reference/glossary/api-namespace-triplet.md similarity index 64% rename from docs/appendix/glossary/api-namespace-triplet.md rename to docs/dev/reference/glossary/api-namespace-triplet.md index 852562aa27..8ad1c956db 100644 --- a/docs/appendix/glossary/api-namespace-triplet.md +++ b/docs/dev/reference/glossary/api-namespace-triplet.md @@ -13,9 +13,9 @@ The `namespace` for built-in Viam resources is `rdk`, while the `type` is `compo `subtype` refers to a specific component or service, like a `camera` or `vision`. 
One subtype can have various {{< glossary_tooltip term_id="model" text="models" >}}, custom or built-in, but they all must conform to the subtype's API definition. -This requirement ensures that when a resource of that model is deployed, you can [interface with it](/sdks/) using the same [client API methods](/appendix/apis/) you would when programming resources of the same subtype with a different model. +This requirement ensures that when a resource of that model is deployed, you can [interface with it](/dev/reference/sdks/) using the same [client API methods](/dev/reference/apis/) you would when programming resources of the same subtype with a different model. For example: -- The API of the built-in component [camera](/components/camera/) is `rdk:component:camera`, which exposes methods such as `GetImage()`. -- The API of the built-in service [vision](/services/vision/) is `rdk:service:vision`, which exposes methods such as `GetDetectionsFromCamera()`. +- The API of the built-in component [camera](/operate/reference/components/camera/) is `rdk:component:camera`, which exposes methods such as `GetImage()`. +- The API of the built-in service [vision](/operate/reference/services/vision/) is `rdk:service:vision`, which exposes methods such as `GetDetectionsFromCamera()`. diff --git a/docs/appendix/glossary/attribute.md b/docs/dev/reference/glossary/attribute.md similarity index 100% rename from docs/appendix/glossary/attribute.md rename to docs/dev/reference/glossary/attribute.md diff --git a/docs/appendix/glossary/base.md b/docs/dev/reference/glossary/base.md similarity index 78% rename from docs/appendix/glossary/base.md rename to docs/dev/reference/glossary/base.md index 3471a73198..fd79b5b1fc 100644 --- a/docs/appendix/glossary/base.md +++ b/docs/dev/reference/glossary/base.md @@ -8,4 +8,4 @@ short_description: A physical, mobile platform that the other parts of a mobile A physical, mobile platform that the other parts of a mobile robot attach to. 
For example, a wheeled rover, boat, or flying drone. -For more information see [Base Component](/components/base/). +For more information see [Base Component](/operate/reference/components/base/). diff --git a/docs/appendix/glossary/board.md b/docs/dev/reference/glossary/board.md similarity index 79% rename from docs/appendix/glossary/board.md rename to docs/dev/reference/glossary/board.md index 0114b3b3a6..9d6e135a63 100644 --- a/docs/appendix/glossary/board.md +++ b/docs/dev/reference/glossary/board.md @@ -9,4 +9,4 @@ A board is the signal wire hub of a machine that provides access to GPIO pins. Examples of boards include Jetson, Raspberry Pi, Numato, or Arduino. -For more information see [Board Component](/components/board/). +For more information see [Board Component](/operate/reference/components/board/). diff --git a/docs/appendix/glossary/client-application.md b/docs/dev/reference/glossary/client-application.md similarity index 100% rename from docs/appendix/glossary/client-application.md rename to docs/dev/reference/glossary/client-application.md diff --git a/docs/appendix/glossary/component.md b/docs/dev/reference/glossary/component.md similarity index 85% rename from docs/appendix/glossary/component.md rename to docs/dev/reference/glossary/component.md index cd8b7e2f1b..26326d156b 100644 --- a/docs/appendix/glossary/component.md +++ b/docs/dev/reference/glossary/component.md @@ -8,4 +8,4 @@ A resource that often represents a physical piece of hardware in a machine which Each component is typed by a proto API, such as the [component proto definitions](https://github.com/viamrobotics/api/tree/main/proto/viam/component). -For more information, see [Components](/configure/#components). +For more information, see [Components](/operate/get-started/supported-hardware/). 
diff --git a/docs/appendix/glossary/fragment.md b/docs/dev/reference/glossary/fragment.md similarity index 75% rename from docs/appendix/glossary/fragment.md rename to docs/dev/reference/glossary/fragment.md index c167df6ce6..fcc59a0765 100644 --- a/docs/appendix/glossary/fragment.md +++ b/docs/dev/reference/glossary/fragment.md @@ -1,11 +1,11 @@ --- title: Fragment id: fragment -full_link: /configure/#fragments +full_link: /manage/fleet/reuse-configuration/ short_description: A reusable configuration block that you can share across multiple machines. --- A reusable configuration block that you can share across multiple machines. For example, if you are deploying a specific mobile machine that is always physically connected the same way, you can create a fragment to make managing your fleet easy. -For more information, see [Fragments](/fleet/fragments/). +For more information, see [Fragments](/manage/fleet/reuse-configuration/). diff --git a/docs/appendix/glossary/frame-system.md b/docs/dev/reference/glossary/frame-system.md similarity index 85% rename from docs/appendix/glossary/frame-system.md rename to docs/dev/reference/glossary/frame-system.md index 29d9593ffd..1b7f97691e 100644 --- a/docs/appendix/glossary/frame-system.md +++ b/docs/dev/reference/glossary/frame-system.md @@ -1,7 +1,7 @@ --- title: Frame System id: frame-system -full_link: /services/frame-system/ +full_link: /operate/mobility/define-geometry/ short_description: The frame system holds reference frame information for the relative position of components in space. 
--- diff --git a/docs/appendix/glossary/frame.md b/docs/dev/reference/glossary/frame.md similarity index 100% rename from docs/appendix/glossary/frame.md rename to docs/dev/reference/glossary/frame.md diff --git a/docs/appendix/glossary/gantry.md b/docs/dev/reference/glossary/gantry.md similarity index 100% rename from docs/appendix/glossary/gantry.md rename to docs/dev/reference/glossary/gantry.md diff --git a/docs/appendix/glossary/grpc.md b/docs/dev/reference/glossary/grpc.md similarity index 100% rename from docs/appendix/glossary/grpc.md rename to docs/dev/reference/glossary/grpc.md diff --git a/docs/appendix/glossary/index.md b/docs/dev/reference/glossary/index.md similarity index 100% rename from docs/appendix/glossary/index.md rename to docs/dev/reference/glossary/index.md diff --git a/docs/appendix/glossary/location.md b/docs/dev/reference/glossary/location.md similarity index 83% rename from docs/appendix/glossary/location.md rename to docs/dev/reference/glossary/location.md index c3be0b89e0..36d4b81d86 100644 --- a/docs/appendix/glossary/location.md +++ b/docs/dev/reference/glossary/location.md @@ -1,10 +1,10 @@ --- title: Location id: location -full_link: /cloud/locations/ +full_link: /manage/reference/organize/ short_description: A location is a virtual grouping of machines that allows you to organize machines and manage access to your fleet. --- A location is a virtual grouping of machines that allows you to organize machines and manage access to your fleet. -For more information, see [Manage Locations and Sub-Locations](/cloud/locations/). +For more information, see [Manage Locations and Sub-Locations](/manage/reference/organize/). 
diff --git a/docs/appendix/glossary/machine-config.md b/docs/dev/reference/glossary/machine-config.md similarity index 74% rename from docs/appendix/glossary/machine-config.md rename to docs/dev/reference/glossary/machine-config.md index 19bbd293d3..1a965ba177 100644 --- a/docs/appendix/glossary/machine-config.md +++ b/docs/dev/reference/glossary/machine-config.md @@ -7,4 +7,4 @@ short_description: The complete configuration of a single machine part. The complete configuration of a single machine {{< glossary_tooltip term_id="part" text="part" >}}. -For more information, see [Configuration](/configure/). +For more information, see [Configuration](/operate/get-started/supported-hardware/). diff --git a/docs/appendix/glossary/machine.md b/docs/dev/reference/glossary/machine.md similarity index 83% rename from docs/appendix/glossary/machine.md rename to docs/dev/reference/glossary/machine.md index 7da2f42eb6..e71a038726 100644 --- a/docs/appendix/glossary/machine.md +++ b/docs/dev/reference/glossary/machine.md @@ -7,4 +7,4 @@ short_description: An organizational concept, consisting of a computer and the c A smart machine is an organizational concept, consisting of either one _{{< glossary_tooltip term_id="part" text="part" >}}_, or multiple _parts_ working closely together to complete tasks. -For more information, see [Machines](/cloud/machines/). +For more information, see [Machines](/operate/get-started/setup/#what-is-a-machine). diff --git a/docs/dev/reference/glossary/model-namespace-triplet.md b/docs/dev/reference/glossary/model-namespace-triplet.md new file mode 100644 index 0000000000..d6fa141f11 --- /dev/null +++ b/docs/dev/reference/glossary/model-namespace-triplet.md @@ -0,0 +1,10 @@ +--- +title: Model Namespace Triplet +id: model-namespace-triplet +short_description: namespace:module-name:model-name or rdk:builtin:name +--- + +{{< glossary_tooltip term_id="model" text="Models" >}} are uniquely namespaced as colon-delimited-triplets. 
+Modular resource model names have the form `namespace:module-name:model-name`, for example `esmeraldaLabs:sensors:moisture`. +Built-in model names have the form `rdk:builtin:name`, for example `rdk:builtin:gpio`. +See [Write your module](/operate/get-started/other-hardware/#write-your-module) for more information. diff --git a/docs/appendix/glossary/model.md b/docs/dev/reference/glossary/model.md similarity index 60% rename from docs/appendix/glossary/model.md rename to docs/dev/reference/glossary/model.md index 0265a5ed5e..83193b8e2b 100644 --- a/docs/appendix/glossary/model.md +++ b/docs/dev/reference/glossary/model.md @@ -5,15 +5,15 @@ full_link: short_description: A particular implementation of a resource. For example, UR5e is a model of the arm component subtype. --- -A particular implementation of a {{< glossary_tooltip term_id="resource" text="resource" >}} {{< glossary_tooltip term_id="subtype" text="subtype" >}} that implements its [API](/appendix/apis/). +A particular implementation of a {{< glossary_tooltip term_id="resource" text="resource" >}} {{< glossary_tooltip term_id="subtype" text="subtype" >}} that implements its [API](/dev/reference/apis/). Models allow you to control hardware or software of a similar category, such as motors, with a consistent set of methods as an interface, even if the underlying implementation differs. -For example, some _models_ of DC motors communicate using [GPIO](/components/board/), while other DC motors use serial protocols like the SPI bus. +For example, some _models_ of DC motors communicate using [GPIO](/operate/reference/components/board/), while other DC motors use serial protocols like the SPI bus. Regardless, you can power any motor model that implements the `rdk:component:motor` API with the `SetPower()` method. -Models are either included with [`viam-server`](/architecture/viam-server/) or provided through {{< glossary_tooltip term_id="module" text="modules" >}}. 
+Models are either included with [`viam-server`](/operate/reference/viam-server/) or provided through {{< glossary_tooltip term_id="module" text="modules" >}}. All models are uniquely namespaced as colon-delimited-triplets. Built-in model names have the form `rdk:builtin:name`. -Modular resource model names have the form `namespace:repo-name:name`. -See [Name your new resource model](/how-tos/create-module/#name-your-new-resource-model) for more information. +Modular resource model names have the form `namespace:module-name:model-name`. +See [Write your module](/operate/get-started/other-hardware/#write-your-module) for more information. diff --git a/docs/appendix/glossary/modular-resource.md b/docs/dev/reference/glossary/modular-resource.md similarity index 82% rename from docs/appendix/glossary/modular-resource.md rename to docs/dev/reference/glossary/modular-resource.md index ba46d4abf4..0fe8fbe40a 100644 --- a/docs/appendix/glossary/modular-resource.md +++ b/docs/dev/reference/glossary/modular-resource.md @@ -1,7 +1,6 @@ --- title: Modular Resource id: modular-resource -full_link: /registry/ short_description: A modular resource is a model of a component or service provided by a module. --- @@ -9,4 +8,4 @@ A modular resource is a {{< glossary_tooltip term_id="model" text="model" >}} of A modular resource runs in a module process. This differs from built-in resources, which run as part of `viam-server`. -For more information see the [Modular Resource Documentation](/registry/). +For more information, see [Integrate other hardware](/operate/get-started/other-hardware/#write-your-module). 
diff --git a/docs/appendix/glossary/module.md b/docs/dev/reference/glossary/module.md similarity index 73% rename from docs/appendix/glossary/module.md rename to docs/dev/reference/glossary/module.md index d5756e5dbe..381e73cfbc 100644 --- a/docs/appendix/glossary/module.md +++ b/docs/dev/reference/glossary/module.md @@ -8,6 +8,4 @@ short_description: A module provides one or more modular resources, which add re A _module_ provides one or more {{< glossary_tooltip term_id="modular-resource" text="modular resources" >}}, which add {{< glossary_tooltip term_id="resource" text="resource" >}} {{< glossary_tooltip term_id="type" text="types" >}} or {{< glossary_tooltip term_id="model" text="models" >}} that are not built into Viam. Modules run alongside `viam-server` as separate process, communicating with `viam-server` over UNIX sockets. -You can [create your own module](/how-tos/create-module/) or [add existing modules from the Viam Registry](/registry/modular-resources/). - -For more information see the [modular resource documentation](/registry/). +You can [create your own module](/operate/get-started/other-hardware/) or [add existing modules from the Viam Registry](/operate/get-started/supported-hardware/). diff --git a/docs/appendix/glossary/mql.md b/docs/dev/reference/glossary/mql.md similarity index 84% rename from docs/appendix/glossary/mql.md rename to docs/dev/reference/glossary/mql.md index 5ba1999523..99c20e8f5d 100644 --- a/docs/appendix/glossary/mql.md +++ b/docs/dev/reference/glossary/mql.md @@ -7,4 +7,4 @@ short_description: MQL is the MongoDB query language, similar to SQL but specifi MQL is the [MongoDB query language](https://www.mongodb.com/docs/manual/tutorial/query-documents/), similar to {{< glossary_tooltip term_id="sql" text="SQL" >}} but specific to the MongoDB document model. -You can use MQL to query data that you have synced to the Viam app using the [data management service](/services/data/). 
+You can use MQL to query data that you have synced to the Viam app using the [data management service](/data-ai/capture-data/capture-sync/). diff --git a/docs/appendix/glossary/organization.md b/docs/dev/reference/glossary/organization.md similarity index 80% rename from docs/appendix/glossary/organization.md rename to docs/dev/reference/glossary/organization.md index 4334d9be57..a586578ff6 100644 --- a/docs/appendix/glossary/organization.md +++ b/docs/dev/reference/glossary/organization.md @@ -1,7 +1,7 @@ --- title: Organization id: organization -full_link: /fleet/organizations/ +full_link: /manage/reference/organize/ short_description: An organization is a group of one or more locations that helps you organize your fleet and manage who has access to your fleet. --- @@ -9,4 +9,4 @@ An organization is the highest level grouping in the Viam platform, which genera Every {{< glossary_tooltip term_id="location" text="location" >}} is grouped into an organization. You can also have organizations for departments or other entities, or for personal use. -For more information, see [Manage Organizations](/cloud/organizations/). +For more information, see [Organize your machines](/manage/reference/organize/). diff --git a/docs/dev/reference/glossary/part.md b/docs/dev/reference/glossary/part.md new file mode 100644 index 0000000000..d8d6c73332 --- /dev/null +++ b/docs/dev/reference/glossary/part.md @@ -0,0 +1,10 @@ +--- +title: Part +id: part +full_link: /operate/reference/architecture/parts/ +short_description: A single-board computer, desktop, laptop, or other computer running viam-server, the hardware components attached to it, and any services or other resources running on it. 
+--- + +Smart machines are organized into _parts_, where each part represents a computer (a single-board computer, desktop, laptop, or other computer) running `viam-server`, the hardware {{< glossary_tooltip term_id="component" text="components" >}} attached to it, and any {{< glossary_tooltip term_id="service" text="services" >}} or other resources running on it. + +For more information, see [Machine Architecture: Parts](/operate/reference/architecture/parts/). diff --git a/docs/appendix/glossary/pin-number.md b/docs/dev/reference/glossary/pin-number.md similarity index 100% rename from docs/appendix/glossary/pin-number.md rename to docs/dev/reference/glossary/pin-number.md diff --git a/docs/appendix/glossary/process.md b/docs/dev/reference/glossary/process.md similarity index 90% rename from docs/appendix/glossary/process.md rename to docs/dev/reference/glossary/process.md index 4f38391e68..ec3c2e4ea2 100644 --- a/docs/appendix/glossary/process.md +++ b/docs/dev/reference/glossary/process.md @@ -7,4 +7,4 @@ short_description: Managed instances of programs or scripts running on a machine Processes are instances of programs, invoked by commands running binaries or scripts on a {{< glossary_tooltip term_id="part" text="part" >}}. -Find more information in [Configure a Process to Run on Your Machine](/configure/processes/). +Find more information in [Configure a Process to Run on Your Machine](/manage/reference/processes/). 
diff --git a/docs/appendix/glossary/protobuf.md b/docs/dev/reference/glossary/protobuf.md similarity index 100% rename from docs/appendix/glossary/protobuf.md rename to docs/dev/reference/glossary/protobuf.md diff --git a/docs/appendix/glossary/rdk.md b/docs/dev/reference/glossary/rdk.md similarity index 68% rename from docs/appendix/glossary/rdk.md rename to docs/dev/reference/glossary/rdk.md index a4b9c9bf95..8b5ae926fd 100644 --- a/docs/appendix/glossary/rdk.md +++ b/docs/dev/reference/glossary/rdk.md @@ -1,8 +1,8 @@ --- title: RDK (Robot Development Kit) id: rdk -full_link: /architecture/viam-server/ +full_link: /operate/reference/viam-server/ short_description: The official Viam-developed codebase that provides all functionality of an SDK and more. --- -Viam’s Robot Development Kit (RDK) is the [open-source](https://github.com/viamrobotics/rdk), on-machine portion of the Viam platform, that provides [`viam-server`](/architecture/viam-server/) and the Go SDK. +Viam’s Robot Development Kit (RDK) is the [open-source](https://github.com/viamrobotics/rdk), on-machine portion of the Viam platform, that provides [`viam-server`](/operate/reference/viam-server/) and the Go SDK. diff --git a/docs/appendix/glossary/remote-part.md b/docs/dev/reference/glossary/remote-part.md similarity index 69% rename from docs/appendix/glossary/remote-part.md rename to docs/dev/reference/glossary/remote-part.md index f8c66ac38b..950c9b1c3e 100644 --- a/docs/appendix/glossary/remote-part.md +++ b/docs/dev/reference/glossary/remote-part.md @@ -8,4 +8,4 @@ aka: A machine part which is controlled by another machine part. -For more information, see [Machine Architecture: Parts](/architecture/parts/). +For more information, see [Machine Architecture: Parts](/operate/reference/architecture/parts/). 
diff --git a/docs/appendix/glossary/resource.md b/docs/dev/reference/glossary/resource.md similarity index 90% rename from docs/appendix/glossary/resource.md rename to docs/dev/reference/glossary/resource.md index 38c6147fca..64908efa2f 100644 --- a/docs/appendix/glossary/resource.md +++ b/docs/dev/reference/glossary/resource.md @@ -17,4 +17,4 @@ Resources are individual, addressable elements of a machine. Each part has local resources and can also have resources from another {{< glossary_tooltip term_id="remote-part" text="remote">}} machine part. The capabilities of each resource are exposed through the part’s API. -Each resource on your machine implements either one of the [existing Viam APIs](/appendix/apis/), or a [custom interface](/registry/advanced/#new-api-subtypes). +Each resource on your machine implements either one of the [existing Viam APIs](/dev/reference/apis/), or a [custom interface](/registry/advanced/#new-api-subtypes). diff --git a/docs/appendix/glossary/sdk.md b/docs/dev/reference/glossary/sdk.md similarity index 90% rename from docs/appendix/glossary/sdk.md rename to docs/dev/reference/glossary/sdk.md index 6c5eb52314..d4854df97e 100644 --- a/docs/appendix/glossary/sdk.md +++ b/docs/dev/reference/glossary/sdk.md @@ -1,7 +1,7 @@ --- title: SDK (Software Development Kit) id: sdk -full_link: /appendix/apis/ +full_link: /dev/reference/apis/ short_description: Viam provides software development kits (SDKs) to help you write client applications and create support for custom component types. --- @@ -9,4 +9,4 @@ Viam provides software development kits (SDKs) to help you write client applicat The SDKs wrap the `viam-server` {{< glossary_tooltip term_id="grpc" text="gRPC" >}} {{< glossary_tooltip term_id="viam-robot-api" text="Viam Robot API" >}} and streamline connection, authentication, and encryption. -For more information, see [Interact with Resources with Viam's Client SDKs](/appendix/apis/). 
+For more information, see [Interact with Resources with Viam's Client SDKs](/dev/reference/apis/). diff --git a/docs/appendix/glossary/service.md b/docs/dev/reference/glossary/service.md similarity index 80% rename from docs/appendix/glossary/service.md rename to docs/dev/reference/glossary/service.md index 2073208f3b..411d695ab6 100644 --- a/docs/appendix/glossary/service.md +++ b/docs/dev/reference/glossary/service.md @@ -8,4 +8,4 @@ Services are built-in software packages for complex capabilities such as Simulta Each service is typed by a proto API, such as the [service proto definitions](https://github.com/viamrobotics/api/tree/main/proto/viam/service). -For more information, see [Services](/configure/#services). +For more information, see [Services](/operate/get-started/supported-hardware/#add-software-services-to-your-machine). diff --git a/docs/appendix/glossary/setup.md b/docs/dev/reference/glossary/setup.md similarity index 100% rename from docs/appendix/glossary/setup.md rename to docs/dev/reference/glossary/setup.md diff --git a/docs/appendix/glossary/slam.md b/docs/dev/reference/glossary/slam.md similarity index 79% rename from docs/appendix/glossary/slam.md rename to docs/dev/reference/glossary/slam.md index ccdd0af799..dd39f4fc4c 100644 --- a/docs/appendix/glossary/slam.md +++ b/docs/dev/reference/glossary/slam.md @@ -1,10 +1,10 @@ --- title: SLAM id: slam -full_link: /services/slam/ +full_link: /operate/reference/services/slam/ short_description: Simultaneous Localization And Mapping (SLAM) algorithms use data from a machine's sensors to generate a map of the environment and determine the machine's position within it. --- SLAM (Simultaneous Localization and Mapping) algorithms use data from a machine's sensors, like LiDARs, cameras, and movement sensors, to generate a map of the environment and determine the machine's position within it. -For more information, see [SLAM](/services/slam/). 
+For more information, see [SLAM](/operate/reference/services/slam/). diff --git a/docs/appendix/glossary/smart-machine.md b/docs/dev/reference/glossary/smart-machine.md similarity index 100% rename from docs/appendix/glossary/smart-machine.md rename to docs/dev/reference/glossary/smart-machine.md diff --git a/docs/appendix/glossary/sql.md b/docs/dev/reference/glossary/sql.md similarity index 85% rename from docs/appendix/glossary/sql.md rename to docs/dev/reference/glossary/sql.md index cc05e3ff16..3615114646 100644 --- a/docs/appendix/glossary/sql.md +++ b/docs/dev/reference/glossary/sql.md @@ -7,4 +7,4 @@ short_description: SQL (structured query language) is the widely-used, industry- [SQL (structured query language)](https://en.wikipedia.org/wiki/SQL) is the widely-used, industry-standard query language popular with [relational databases](https://en.wikipedia.org/wiki/Relational_database). -You can use SQL to query data that you have synced to the Viam app using the [data management service](/services/data/). +You can use SQL to query data that you have synced to the Viam app using the [data management service](/data-ai/capture-data/capture-sync/). diff --git a/docs/appendix/glossary/subtype.md b/docs/dev/reference/glossary/subtype.md similarity index 84% rename from docs/appendix/glossary/subtype.md rename to docs/dev/reference/glossary/subtype.md index c2e6b58399..613e4f0c2c 100644 --- a/docs/appendix/glossary/subtype.md +++ b/docs/dev/reference/glossary/subtype.md @@ -11,6 +11,6 @@ Resource models belonging to a subtype share the same API. For example, an arm is a subtype of the {{< glossary_tooltip term_id="component" text="component" >}} resource type, while the `ur5e` is a {{< glossary_tooltip term_id="model" text="model" >}} of the arm subtype's API. -The [Vision Service](/services/vision/) is a subtype of the {{< glossary_tooltip term_id="service" text="service" >}} resource type. 
+The [Vision Service](/operate/reference/services/vision/) is a subtype of the {{< glossary_tooltip term_id="service" text="service" >}} resource type. A subtype is designated by its {{< glossary_tooltip term_id="api-namespace-triplet" text="api-namespace-triplet" >}}. diff --git a/docs/appendix/glossary/type.md b/docs/dev/reference/glossary/type.md similarity index 100% rename from docs/appendix/glossary/type.md rename to docs/dev/reference/glossary/type.md diff --git a/docs/appendix/glossary/viam-agent.md b/docs/dev/reference/glossary/viam-agent.md similarity index 84% rename from docs/appendix/glossary/viam-agent.md rename to docs/dev/reference/glossary/viam-agent.md index 5a6f066b56..b101495707 100644 --- a/docs/appendix/glossary/viam-agent.md +++ b/docs/dev/reference/glossary/viam-agent.md @@ -8,4 +8,4 @@ short_description: The Viam provisioning application for deploying viam-server. The Viam Agent is a provisioning application for deploying and managing `viam-server` across a fleet of machines. You can use the Viam Agent to provision a machine as it first comes online with a pre-defined configuration, including WiFi networks or additional build or provision steps. -See [Provision Machines](/fleet/provision/) for more information. +See [Provision Machines](/manage/fleet/provision/setup/) for more information. diff --git a/docs/appendix/glossary/viam-micro-server.md b/docs/dev/reference/glossary/viam-micro-server.md similarity index 72% rename from docs/appendix/glossary/viam-micro-server.md rename to docs/dev/reference/glossary/viam-micro-server.md index 39523d7ea2..c081e603ce 100644 --- a/docs/appendix/glossary/viam-micro-server.md +++ b/docs/dev/reference/glossary/viam-micro-server.md @@ -1,7 +1,7 @@ --- title: viam-micro-server id: viam-micro-server -full_link: /architecture/#viam-server-and-viam-micro-server +full_link: /operate/reference/viam-micro-server/ short_description: The lightweight version of viam-server that can run on ESP32 devices. 
--- @@ -9,4 +9,4 @@ The lightweight version of `viam-server`, built for microcontrollers. `viam-micro-server` is a set of open-source utilities which run on your microcontroller and provides Viam functionality to your machine. `viam-micro-server` is built from the micro-RDK. -For more information see [Architecture](/architecture/#viam-server-and-viam-micro-server). +For more information see [Architecture](/operate/reference/viam-micro-server/). diff --git a/docs/appendix/glossary/viam-robot-api.md b/docs/dev/reference/glossary/viam-robot-api.md similarity index 100% rename from docs/appendix/glossary/viam-robot-api.md rename to docs/dev/reference/glossary/viam-robot-api.md diff --git a/docs/appendix/glossary/viam-server.md b/docs/dev/reference/glossary/viam-server.md similarity index 68% rename from docs/appendix/glossary/viam-server.md rename to docs/dev/reference/glossary/viam-server.md index 475622c606..088b7c941a 100644 --- a/docs/appendix/glossary/viam-server.md +++ b/docs/dev/reference/glossary/viam-server.md @@ -1,11 +1,11 @@ --- title: viam-server id: viam-server -full_link: /architecture/#viam-server-and-viam-micro-server +full_link: /operate/reference/viam-server/ short_description: The executable binary which runs on and provides functionality to machines. --- The open-source executable binary that runs on your machine's computer (such as a single-board computer or a server) and provides most Viam functionality. `viam-server` is built from the RDK. -For more information see [Architecture](/architecture/#viam-server-and-viam-micro-server). +For more information see [Architecture](/operate/reference/viam-server/). 
diff --git a/docs/appendix/glossary/web-sockets.md b/docs/dev/reference/glossary/web-sockets.md similarity index 100% rename from docs/appendix/glossary/web-sockets.md rename to docs/dev/reference/glossary/web-sockets.md diff --git a/docs/appendix/glossary/webrtc.md b/docs/dev/reference/glossary/webrtc.md similarity index 100% rename from docs/appendix/glossary/webrtc.md rename to docs/dev/reference/glossary/webrtc.md diff --git a/docs/architecture/machine-to-machine-comms.md b/docs/dev/reference/machine-to-machine-comms.md similarity index 99% rename from docs/architecture/machine-to-machine-comms.md rename to docs/dev/reference/machine-to-machine-comms.md index 5db1661d11..f461354460 100644 --- a/docs/architecture/machine-to-machine-comms.md +++ b/docs/dev/reference/machine-to-machine-comms.md @@ -7,6 +7,7 @@ description: "Explanation of how a machine and its parts interact at the communi aliases: - "/internals/robot-to-robot-comms/" - "/internals/machine-to-machine-comms/" + - "/architecture/machine-to-machine-comms/" toc_hide: true --- diff --git a/docs/dev/reference/sdks/_index.md b/docs/dev/reference/sdks/_index.md new file mode 100644 index 0000000000..a05f93ecad --- /dev/null +++ b/docs/dev/reference/sdks/_index.md @@ -0,0 +1,38 @@ +--- +title: "Write control code with Viam SDKs" +linkTitle: "SDKs" +weight: 10 +type: "docs" +aliases: + - /sdks/ +--- + +### Backend SDKs + +The backend SDKs allow you to build business logic to control [components](/dev/reference/apis/#component-apis) and [services](/dev/reference/apis/#service-apis), as well as manage your [fleet](/dev/reference/apis/fleet/) and [data](/dev/reference/apis/data-client/), and [billing information](/dev/reference/apis/billing-client/), or [provision](/manage/fleet/provision/setup/) machines. +With the backend SDKs you can also create custom {{< glossary_tooltip term_id="modular-resource" text="modular resources" >}}. 
+ +{{< sectionlist-custom class="horizontal" >}} +{{% sectionlist-custom-item link="/dev/reference/sdks/python/" %}} +{{% sectionlist-custom-item link="/dev/reference/sdks/go/" %}} +{{% sectionlist-custom-item link="/dev/reference/sdks/cpp/" %}} +{{< /sectionlist-custom >}} +
+ +### Frontend SDKs + +The frontend TypeScript SDK allows you to control your machine's [components](/dev/reference/apis/#component-apis), as well as manage your [data](/dev/reference/apis/data-client/) or [provision](/manage/fleet/provision/setup/) machines. + +{{< sectionlist-custom class="horizontal" >}} +{{% sectionlist-custom-item link="/dev/reference/sdks/typescript/" %}} +{{< /sectionlist-custom >}} +
+
+### Mobile SDK
+
+The mobile SDK allows you to build iOS and Android apps to control your machine's [components](/dev/reference/apis/#component-apis), as well as manage your [fleet](/dev/reference/apis/fleet/) and [data](/dev/reference/apis/data-client/), or [provision](/manage/fleet/provision/setup/) machines.
+
+{{< sectionlist-custom class="horizontal" >}}
+{{% sectionlist-custom-item link="/dev/reference/sdks/flutter/" %}}
+{{< /sectionlist-custom >}}
+
diff --git a/docs/sdks/connectivity.md b/docs/dev/reference/sdks/connectivity.md similarity index 51% rename from docs/sdks/connectivity.md rename to docs/dev/reference/sdks/connectivity.md index 5ebedc6cd1..3d6876e782 100644 --- a/docs/sdks/connectivity.md +++ b/docs/dev/reference/sdks/connectivity.md @@ -8,27 +8,28 @@ tags: ["client", "sdk", "viam-server", "networking", "apis", "robot api", "session"] aliases: - /program/connectivity/ + - /sdks/connectivity/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- -When connecting to a machine using the connection code from the [**CONNECT** tab](/sdks/#code-samples), a [client session](/appendix/apis/sessions/) automatically uses the most efficient route to connect to your machine either through local LAN or WAN or the internet. +When connecting to a machine using the connection code from the [**CONNECT** tab](/dev/reference/sdks/), a [client session](/dev/reference/apis/sessions/) automatically uses the most efficient route to connect to your machine either through local LAN or WAN or the internet. When a machine loses its connection to the internet but is still connected to a LAN or WAN: - Client sessions connected through the same LAN or WAN will function normally. - Client sessions connected through the internet will timeout and end. If the client is on the same LAN or WAN but the route it chose to connect is through the internet, the client will automatically disconnect and then reconnect over LAN. -- Cloud sync for the [data management service](/services/data/) will pause until the internet connection is re-established since the machine will be unable to connect to the [Viam app](https://app.viam.com). +- Cloud sync for the [data management service](/data-ai/capture-data/capture-sync/) will pause until the internet connection is re-established since the machine will be unable to connect to the [Viam app](https://app.viam.com). 
When a machine loses its connection to LAN or WAN, all client sessions will timeout and end by default. ## Client session timeout and end -When your client cannot connect to your machine's `viam-server` instance, `viam-server` will end any current client [_sessions_](/appendix/apis/sessions/) on this machine and all client operations will [timeout automatically](/appendix/apis/sessions/) and halt: any active commands will be cancelled, stopping any moving parts, and no new commands will be able to reach the machine until the connection is restored. +When your client cannot connect to your machine's `viam-server` instance, `viam-server` will end any current client [_sessions_](/dev/reference/apis/sessions/) on this machine and all client operations will [timeout automatically](/dev/reference/apis/sessions/) and halt: any active commands will be cancelled, stopping any moving parts, and no new commands will be able to reach the machine until the connection is restored. -To disable the default behavior and manage resource timeout and reconfiguration over a networking session yourself, you can [disable the default behavior](/appendix/apis/sessions/#disable-default-session-management) of session management, then use [Viam's SDKs](/sdks/) in your code to make calls to [the session management API](https://pkg.go.dev/go.viam.com/rdk/session#hdr-API). +To disable the default behavior and manage resource timeout and reconfiguration over a networking session yourself, you can [disable the default behavior](/dev/reference/apis/sessions/#disable-default-session-management) of session management, then use [Viam's SDKs](/dev/reference/sdks/) in your code to make calls to [the session management API](https://pkg.go.dev/go.viam.com/rdk/session#hdr-API). 
## Configure a connection timeout -When connecting to a machine using the [robot API](/appendix/apis/robot/) from a supported [Viam SDK](/appendix/apis/), you can configure an [optional timeout](/appendix/apis/robot/#configure-a-timeout) to account for intermittent or delayed network connectivity. +When connecting to a machine using the [robot API](/dev/reference/apis/robot/) from a supported [Viam SDK](/dev/reference/apis/), you can configure an [optional timeout](/dev/reference/apis/robot/#configure-a-timeout) to account for intermittent or delayed network connectivity. diff --git a/docs/sdks/cpp.md b/docs/dev/reference/sdks/cpp.md similarity index 100% rename from docs/sdks/cpp.md rename to docs/dev/reference/sdks/cpp.md diff --git a/docs/sdks/flutter.md b/docs/dev/reference/sdks/flutter.md similarity index 100% rename from docs/sdks/flutter.md rename to docs/dev/reference/sdks/flutter.md diff --git a/docs/sdks/go.md b/docs/dev/reference/sdks/go.md similarity index 100% rename from docs/sdks/go.md rename to docs/dev/reference/sdks/go.md diff --git a/docs/sdks/python/_index.md b/docs/dev/reference/sdks/python/_index.md similarity index 100% rename from docs/sdks/python/_index.md rename to docs/dev/reference/sdks/python/_index.md diff --git a/docs/sdks/python/python-venv.md b/docs/dev/reference/sdks/python/python-venv.md similarity index 95% rename from docs/sdks/python/python-venv.md rename to docs/dev/reference/sdks/python/python-venv.md index abd36daca9..34518e1a80 100644 --- a/docs/sdks/python/python-venv.md +++ b/docs/dev/reference/sdks/python/python-venv.md @@ -9,6 +9,7 @@ tags: ["client", "sdk", "application", "sdk", "fleet", "program", "python", "venv"] aliases: - /program/python-venv/ + - /sdks/python/python-venv/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- @@ -67,7 +68,7 @@ pip3 install viam-sdk This installs the Viam Python SDK and all required general dependencies. 
-If you intend to use the [ML (machine learning) model service](/services/ml/), install the Python SDK using the `mlmodel` extra: +If you intend to use the [ML (machine learning) model service](/data-ai/ai/deploy/), install the Python SDK using the `mlmodel` extra: ```sh {class="command-line" data-prompt="$"} pip3 install 'viam-sdk[mlmodel]' @@ -100,4 +101,4 @@ Your IDE will now recognize all packages installed in this environment. ## Start building -You are now ready to [start using Viam's Python SDK](/sdks/)! +You are now ready to [start using Viam's Python SDK](/dev/reference/sdks/)! diff --git a/docs/sdks/typescript.md b/docs/dev/reference/sdks/typescript.md similarity index 100% rename from docs/sdks/typescript.md rename to docs/dev/reference/sdks/typescript.md diff --git a/docs/sdks/use-extra-params.md b/docs/dev/reference/sdks/use-extra-params.md similarity index 83% rename from docs/sdks/use-extra-params.md rename to docs/dev/reference/sdks/use-extra-params.md index 3e1f980460..522bbfa21c 100644 --- a/docs/sdks/use-extra-params.md +++ b/docs/dev/reference/sdks/use-extra-params.md @@ -9,20 +9,21 @@ tags: ["sdk", "extra", "extend"] aliases: - /program/sdks/use-extra-params - /program/use-extra-params/ + - /sdks/use-extra-params/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- -How to [use](#use) and [define](#define) the `extra` parameters that many {{< glossary_tooltip term_id="resource" text="resource" >}} [API methods](/appendix/apis/) offer in the Go and Python SDKs. +How to [use](#use) and [define](#define) the `extra` parameters that many {{< glossary_tooltip term_id="resource" text="resource" >}} [API methods](/dev/reference/apis/) offer in the Go and Python SDKs. ## Use You can use `extra` parameters with modular {{< glossary_tooltip term_id="resource" text="resource" >}} implementations that are _models_ of built-in resource types. 
-For example, a new model of [sensor](/components/sensor/), or a new model of {{< glossary_tooltip term_id="slam" text="SLAM" >}} service. +For example, a new model of [sensor](/operate/reference/components/sensor/), or a new model of {{< glossary_tooltip term_id="slam" text="SLAM" >}} service. -The `extra` parameters in that built-in resource type's [API](/appendix/apis/) allow users to pass information to a resource's driver that isn't specified as a parameter for all models of the resource type. -This is necessary to keep the API of resource types consistent across, for example, all models of [motor](/components/motor/) or all models of [camera](/components/camera/). +The `extra` parameters in that built-in resource type's [API](/dev/reference/apis/) allow users to pass information to a resource's driver that isn't specified as a parameter for all models of the resource type. +This is necessary to keep the API of resource types consistent across, for example, all models of [motor](/operate/reference/components/motor/) or all models of [camera](/operate/reference/components/camera/). Send extra information in an API call in `extra` parameters as follows: @@ -94,9 +95,9 @@ If `extra` information must be passed to a resource, it is handled within a new, {{%expand "Click for instructions on defining a custom model to use extra params" %}} To do this, define a custom implementation of the resource's API as a new _model_, and modify the resource's API methods to handle the `extra` information you send. -Follow the steps in the [Modular Resources documentation](/how-tos/create-module/) to do so. +Follow the steps in the [Modular Resources documentation](/operate/get-started/other-hardware/) to do so. 
-For an example of how to check the values of keys in an `extra` parameter of a built-in resource [API method](/appendix/apis/), reference this modification to the built-in [sensor](/components/sensor/) resource type's [Readings](/appendix/apis/components/sensor/#getreadings) method in the code of a [new sensor model](/registry/): +For an example of how to check the values of keys in an `extra` parameter of a built-in resource [API method](/dev/reference/apis/), reference this modification to the built-in [sensor](/operate/reference/components/sensor/) resource type's [Readings](/dev/reference/apis/components/sensor/#getreadings) method in the code of a new sensor model: {{< tabs >}} {{% tab name="Python" %}} @@ -162,6 +163,6 @@ func (s *mySensor) Readings(ctx context.Context, extra map[string]interface{}) ( {{% /tab %}} {{% /tabs %}} -See [Extend Viam with Modular Resources](/registry/) for more information and [instructions](/registry/) on modifying built-in API specifications. +See [Integrate other hardware](/operate/get-started/other-hardware/) for more information and instructions on modifying built-in API specifications. {{% /expand%}} diff --git a/docs/appendix/try-viam/_index.md b/docs/dev/reference/try-viam/_index.md similarity index 86% rename from docs/appendix/try-viam/_index.md rename to docs/dev/reference/try-viam/_index.md index ca20ff5140..9730ed417f 100644 --- a/docs/appendix/try-viam/_index.md +++ b/docs/dev/reference/try-viam/_index.md @@ -19,6 +19,7 @@ aliases: - "/appendix/try-viam-faq/" - "/try-viam/faq/" - "get-started/try-viam/faq/" + - /appendix/try-viam/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- @@ -44,7 +45,7 @@ See detailed instructions.

{{}} 3. Get started with Viam -

Try a Viam Rover in our robotics lab. Drive or program the rover to see how you can build a machine with Viam. You can also try services like computer vision.

+

Try a Viam Rover in our robotics lab. Drive or program the rover to see how you can build a machine with Viam.

@@ -52,7 +53,6 @@ See detailed instructions.

## Next steps {{< cards >}} -{{% card link="/how-tos/drive-rover/" %}} -{{% card link="/how-tos/detect-color/" %}} -{{% card link="/appendix/try-viam/rover-resources/" %}} +{{% card link="/tutorials/control/drive-rover/" %}} +{{% card link="/dev/reference/try-viam/rover-resources/" %}} {{< /cards >}} diff --git a/docs/appendix/try-viam/reserve-a-rover.md b/docs/dev/reference/try-viam/reserve-a-rover.md similarity index 95% rename from docs/appendix/try-viam/reserve-a-rover.md rename to docs/dev/reference/try-viam/reserve-a-rover.md index cc81f78360..18aa993ca7 100644 --- a/docs/appendix/try-viam/reserve-a-rover.md +++ b/docs/dev/reference/try-viam/reserve-a-rover.md @@ -51,8 +51,7 @@ When using a rented Viam rover, adding [modules](/registry/) is disabled for sec ## Next steps {{< cards >}} -{{% card link="/how-tos/drive-rover/" %}} -{{% card link="/how-tos/detect-color/" %}} +{{% card link="/tutorials/control/drive-rover/" %}} {{< /cards >}} ## FAQ @@ -131,14 +130,13 @@ If you would like to, you can [extend your reservation](/appendix/try-viam/reser Yes! You can borrow the rover as many times as you’d like. Here are some tutorials which you can follow: -- [Drive with the Viam SDK](/how-tos/drive-rover/) -- [Detect a Color](/how-tos/detect-color/) +- [Drive with the Viam SDK](/tutorials/control/drive-rover/) If you want to get your own Viam Rover, [you can](https://viam.com/resources/rover). ### Why can't I use the rover's microphone? For security reasons, Viam has disabled the microphone on rover rentals. -The microphone on [Viam Rovers shipped to you](/appendix/try-viam/rover-resources/) functions normally. +The microphone on [Viam Rovers shipped to you](/dev/reference/try-viam/rover-resources/) functions normally. 
{{< snippet "social.md" >}} diff --git a/docs/appendix/try-viam/rover-resources/_index.md b/docs/dev/reference/try-viam/rover-resources/_index.md similarity index 98% rename from docs/appendix/try-viam/rover-resources/_index.md rename to docs/dev/reference/try-viam/rover-resources/_index.md index ab27d19ee1..5b7c6b1404 100644 --- a/docs/appendix/try-viam/rover-resources/_index.md +++ b/docs/dev/reference/try-viam/rover-resources/_index.md @@ -12,6 +12,7 @@ aliases: - "/rover-resources/" - "/try-viam/rover-resources/" - "/get-started/try-viam/rover-resources/" + - /appendix/try-viam/rover-resources/ description: If you want a convenient mobile base for robotics projects, order a Viam rover and set it up. date: "2022-01-01" # updated: "" # When the content was last entirely checked diff --git a/docs/appendix/try-viam/rover-resources/rover-tutorial-1.md b/docs/dev/reference/try-viam/rover-resources/rover-tutorial-1.md similarity index 97% rename from docs/appendix/try-viam/rover-resources/rover-tutorial-1.md rename to docs/dev/reference/try-viam/rover-resources/rover-tutorial-1.md index 1a00e5d575..2d4a96f2db 100644 --- a/docs/appendix/try-viam/rover-resources/rover-tutorial-1.md +++ b/docs/dev/reference/try-viam/rover-resources/rover-tutorial-1.md @@ -11,13 +11,14 @@ aliases: - "/rover-resources/rover-tutorial/" - "/try-viam/rover-resources/rover-tutorial/" - "/get-started/try-viam/rover-resources/rover-tutorial/" + - /appendix/try-viam/rover-resources/rover-tutorial-1/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- {{% alert title="Tip" color="tip" %}} A new version of the Viam Rover is now available, the [Viam Rover 2](https://www.viam.com/resources/rover). -If you have purchased a Viam Rover 2, follow [these instructions](/appendix/try-viam/rover-resources/rover-tutorial/) instead. +If you have purchased a Viam Rover 2, follow [these instructions](/dev/reference/try-viam/rover-resources/rover-tutorial/) instead. 
{{% /alert %}} The [Viam Rover 1](https://www.viam.com/resources/rover) arrives preassembled with two encoded motors with suspension, a webcam with a microphone unit, and a 3D accelerometer module. @@ -250,21 +251,20 @@ Follow the instructions to install `viam-server` on **Linux / Aarch64**. {{< glossary_tooltip term_id="RDK" text="RDK" >}} type. `ssh` into your Pi and follow the setup instructions to install and run `viam-server` on the machine. -To configure your rover so you can start driving it, [add the Viam Fragment to your Machine](/appendix/try-viam/rover-resources/rover-tutorial-fragments/). +To configure your rover so you can start driving it, [add the Viam Fragment to your Machine](/dev/reference/try-viam/rover-resources/rover-tutorial-fragments/). ## Next steps Before you can use your Viam rover with the Viam platform you need to configure your rover: {{< cards >}} -{{% card link="/appendix/try-viam/rover-resources/rover-tutorial-fragments/" %}} +{{% card link="/dev/reference/try-viam/rover-resources/rover-tutorial-fragments/" %}} {{< /cards >}} After you have configured your rover, follow one of these tutorials: {{< cards >}} -{{% card link="/how-tos/drive-rover/" %}} -{{% card link="/how-tos/detect-color/" %}} +{{% card link="/tutorials/control/drive-rover/" %}} {{% card link="/tutorials/services/navigate-with-rover-base/" %}} {{< /cards >}} diff --git a/docs/appendix/try-viam/rover-resources/rover-tutorial-fragments.md b/docs/dev/reference/try-viam/rover-resources/rover-tutorial-fragments.md similarity index 98% rename from docs/appendix/try-viam/rover-resources/rover-tutorial-fragments.md rename to docs/dev/reference/try-viam/rover-resources/rover-tutorial-fragments.md index 29b51f3d83..88ba86d5cb 100644 --- a/docs/appendix/try-viam/rover-resources/rover-tutorial-fragments.md +++ b/docs/dev/reference/try-viam/rover-resources/rover-tutorial-fragments.md @@ -8,6 +8,7 @@ description: "Configure your rover by adding the Viam-provided configuration fra 
aliases: - "/try-viam/rover-resources/rover-tutorial-fragments/" - "/get-started/try-viam/rover-resources/rover-tutorial-fragments/" + - /appendix/try-viam/rover-resources/rover-tutorial-fragments/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- @@ -20,7 +21,7 @@ Viam provides reusable {{% glossary_tooltip term_id="fragment" text="*fragments* - An assembled Viam Rover. For assembly instructions, see [Unbox and Set Up your Viam Rover](../rover-tutorial/) - The board is connected to the [Viam app](https://app.viam.com). - To add your Pi to the Viam app, refer to [the rover setup guide](/appendix/try-viam/rover-resources/rover-tutorial/#control-your-rover-on-the-viam-app). + To add your Pi to the Viam app, refer to [the rover setup guide](/dev/reference/try-viam/rover-resources/rover-tutorial/#control-your-rover-on-the-viam-app). ## Add the fragment @@ -207,7 +208,6 @@ The fragment you added is read-only, but if you need to modify your rover's conf After you have configured your rover, follow one of these tutorials: {{< cards >}} -{{% card link="/how-tos/drive-rover/" %}} -{{% card link="/how-tos/detect-color/" %}} +{{% card link="/tutorials/control/drive-rover/" %}} {{% card link="/tutorials/services/navigate-with-rover-base/" %}} {{< /cards >}} diff --git a/docs/appendix/try-viam/rover-resources/rover-tutorial/_index.md b/docs/dev/reference/try-viam/rover-resources/rover-tutorial/_index.md similarity index 97% rename from docs/appendix/try-viam/rover-resources/rover-tutorial/_index.md rename to docs/dev/reference/try-viam/rover-resources/rover-tutorial/_index.md index 0718c9ad67..40d2470935 100644 --- a/docs/appendix/try-viam/rover-resources/rover-tutorial/_index.md +++ b/docs/dev/reference/try-viam/rover-resources/rover-tutorial/_index.md @@ -10,13 +10,14 @@ description: "A list of the contents of the Viam Rover 2 kit, instructions for w no_list: true aliases: - "/get-started/try-viam/rover-resources/rover-tutorial" + - 
/appendix/try-viam/rover-resources/rover-tutorial/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- {{% alert title="Tip" color="tip" %}} Another version of the Viam Rover was sold until January 2024. -If you have purchased a Viam Rover 1, follow [these instructions](/appendix/try-viam/rover-resources/rover-tutorial-1/) instead. +If you have purchased a Viam Rover 1, follow [these instructions](/dev/reference/try-viam/rover-resources/rover-tutorial-1/) instead. {{% /alert %}} The [Viam Rover 2](https://www.viam.com/resources/rover) arrives preassembled with two encoded motors with suspension, a webcam with a microphone unit, a 6 axis IMU, power management and more. @@ -354,7 +355,7 @@ If not, you will have to take off the ribbon cable and use [dupont connectors](h {{< expand "Raspberry Pi 5" >}} If you are using a Raspberry Pi 5, use the same screw placements as for the Raspberry Pi 4. The hardware setup is the same. -The only difference is in the [configuration](/appendix/try-viam/rover-resources/rover-tutorial-fragments/). +The only difference is in the [configuration](/dev/reference/try-viam/rover-resources/rover-tutorial-fragments/). {{< /expand >}} Then connect the webcam's USB lead to any USB port on your board. @@ -393,21 +394,20 @@ If you followed the instructions in the [Pi installation guide](/installation/pr If not, add a new machine in the [Viam app](https://app.viam.com) and follow the {{< glossary_tooltip term_id="setup" text="setup instructions" >}} until your machine is connected. -To configure your rover so you can start driving it, [add a Viam Rover 2 Fragment to your machine](/appendix/try-viam/rover-resources/rover-tutorial-fragments/). +To configure your rover so you can start driving it, [add a Viam Rover 2 Fragment to your machine](/dev/reference/try-viam/rover-resources/rover-tutorial-fragments/). 
## Next steps Before you can use your Viam rover with the Viam platform you need to configure your rover: {{< cards >}} -{{% card link="/appendix/try-viam/rover-resources/rover-tutorial-fragments/" %}} +{{% card link="/dev/reference/try-viam/rover-resources/rover-tutorial-fragments/" %}} {{< /cards >}} After you have configured your rover, follow one of these tutorials: {{< cards >}} -{{% card link="/how-tos/drive-rover/" %}} -{{% card link="/how-tos/detect-color/" %}} +{{% card link="/tutorials/control/drive-rover/" %}} {{% card link="/tutorials/services/navigate-with-rover-base/" %}} {{< /cards >}} diff --git a/docs/appendix/try-viam/rover-resources/rover-tutorial/jetson-rover-setup.md b/docs/dev/reference/try-viam/rover-resources/rover-tutorial/jetson-rover-setup.md similarity index 90% rename from docs/appendix/try-viam/rover-resources/rover-tutorial/jetson-rover-setup.md rename to docs/dev/reference/try-viam/rover-resources/rover-tutorial/jetson-rover-setup.md index 189e228db6..89330e5d3d 100644 --- a/docs/appendix/try-viam/rover-resources/rover-tutorial/jetson-rover-setup.md +++ b/docs/dev/reference/try-viam/rover-resources/rover-tutorial/jetson-rover-setup.md @@ -9,6 +9,7 @@ imageAlt: "A Viam Rover 2 in a box" description: "Instructions for setting up a Viam Rover 2 with a Jetson Nano or Jetson Orin Nano." aliases: - /get-started/try-viam/rover-resources/rover-tutorial/jetson-rover-setup/ + - /appendix/try-viam/rover-resources/rover-tutorial/jetson-rover-setup/ date: "2022-01-01" # updated: "" # When the content was last entirely checked --- @@ -70,9 +71,9 @@ Some states do not allow the exclusion or disclaimer of implied warranties, so t 1. Install the WiFi board/device on the Nano. Follow the manufacturer's instructions to do so. 2. Power the Jetson Nano with a power supply and [prepare the device and install `viam-server`](/installation/prepare/jetson-nano-setup/). 3. 
Switch back to the main guide and complete these two steps: - [Add the power supply](/appendix/try-viam/rover-resources/rover-tutorial/#add-the-power-supply) and [Configure the low-voltage cutoff circuit](/appendix/try-viam/rover-resources/rover-tutorial/#configure-the-low-voltage-cutoff-circuit). + [Add the power supply](/dev/reference/try-viam/rover-resources/rover-tutorial/#add-the-power-supply) and [Configure the low-voltage cutoff circuit](/dev/reference/try-viam/rover-resources/rover-tutorial/#configure-the-low-voltage-cutoff-circuit). 4. Unscrew the top of the rover with the biggest Allen key. -5. Take the [height extenders](/appendix/try-viam/rover-resources/rover-tutorial/#whats-inside-the-kit) provided in your kit. +5. Take the [height extenders](/dev/reference/try-viam/rover-resources/rover-tutorial/#whats-inside-the-kit) provided in your kit. Apply them to the rover chassis posts. 6. Unscrew the standoffs in the motherboard and relocate them to the Jetson board hole pattern: {{}} 7. Connect the ribbon cable to the motherboard and Jetson Nano. @@ -133,9 +134,9 @@ Some states do not allow the exclusion or disclaimer of implied warranties, so t 1. Power the Jetson Orin Nano with a power supply and [prepare the device and install `viam-server`](/installation/prepare/jetson-nano-setup/). 2. Switch back to the main guide and complete these two steps: - [Add the power supply](/appendix/try-viam/rover-resources/rover-tutorial/#add-the-power-supply) and [Configure the low-voltage cutoff circuit](/appendix/try-viam/rover-resources/rover-tutorial/#configure-the-low-voltage-cutoff-circuit). + [Add the power supply](/dev/reference/try-viam/rover-resources/rover-tutorial/#add-the-power-supply) and [Configure the low-voltage cutoff circuit](/dev/reference/try-viam/rover-resources/rover-tutorial/#configure-the-low-voltage-cutoff-circuit). 3. Unscrew the top of the rover with the biggest Allen key. -4. 
Take the [height extenders](/appendix/try-viam/rover-resources/rover-tutorial/#whats-inside-the-kit) provided in your kit. +4. Take the [height extenders](/dev/reference/try-viam/rover-resources/rover-tutorial/#whats-inside-the-kit) provided in your kit. Apply them to the rover chassis posts. 5. Unscrew the standoffs in the motherboard and relocate them to the Jetson board hole pattern: {{}} 6. **IMPORTANT:** Disconnect the 5V buck converter. Unlike other boards, the Jetson Orin Nano requires a 7-20V input, which means that the board must be powered directly from the battery. @@ -156,13 +157,12 @@ Some states do not allow the exclusion or disclaimer of implied warranties, so t If you followed the instructions in the [Jetson installation guide](/installation/prepare/jetson-nano-setup/), you should have already made an account on the [Viam app](https://app.viam.com), installed `viam-server` on the board, and added a new machine. -To configure your rover so you can start driving it, [add a Viam Rover 2 Fragment to your machine](/appendix/try-viam/rover-resources/rover-tutorial-fragments/). +To configure your rover so you can start driving it, [add a Viam Rover 2 Fragment to your machine](/dev/reference/try-viam/rover-resources/rover-tutorial-fragments/). 
## Next steps After adding the appropriate fragment, follow one of these tutorials with your borrowed or owned rover: {{< cards >}} -{{% card link="/how-tos/drive-rover/" %}} -{{% card link="/how-tos/detect-color/" %}} +{{% card link="/tutorials/control/drive-rover/" %}} {{< /cards >}} diff --git a/docs/appendix/try-viam/try-viam-tutorial.md b/docs/dev/reference/try-viam/try-viam-tutorial.md similarity index 98% rename from docs/appendix/try-viam/try-viam-tutorial.md rename to docs/dev/reference/try-viam/try-viam-tutorial.md index 80d34339c2..5aad38c6d8 100644 --- a/docs/appendix/try-viam/try-viam-tutorial.md +++ b/docs/dev/reference/try-viam/try-viam-tutorial.md @@ -229,7 +229,6 @@ You can [copy this `JSON` config between rental rovers](/appendix/try-viam/reser If you have questions, check out our [FAQ](/appendix/try-viam/reserve-a-rover/) or join our [Discord Community](https://discord.gg/viam), where you can ask questions and meet other people working on robots. {{< cards >}} -{{% card link="/how-tos/drive-rover/" %}} -{{% card link="/how-tos/detect-color/" %}} -{{% card link="/appendix/try-viam/rover-resources/" %}} +{{% card link="/tutorials/control/drive-rover/" %}} +{{% card link="/dev/reference/try-viam/rover-resources/" %}} {{< /cards >}} diff --git a/docs/dev/tools/_index.md b/docs/dev/tools/_index.md new file mode 100644 index 0000000000..6b665d4d6c --- /dev/null +++ b/docs/dev/tools/_index.md @@ -0,0 +1,10 @@ +--- +linkTitle: "Tools" +title: "Tools" +weight: 100 +layout: "empty" +type: "docs" +empty_node: true +open_on_desktop: true +header_only: true +--- diff --git a/docs/cli.md b/docs/dev/tools/cli.md similarity index 96% rename from docs/cli.md rename to docs/dev/tools/cli.md index 4d1ff250da..8357d13602 100644 --- a/docs/cli.md +++ b/docs/dev/tools/cli.md @@ -1,7 +1,7 @@ --- title: "Viam CLI" linkTitle: "CLI" -weight: 700 +weight: 10 type: "docs" no_list: true description: "Manage and control your machines from the command line." 
@@ -9,7 +9,7 @@ aliases: - "/build/program/cli" - /manage/cli/ - /fleet/cli/ -menuindent: true + - /cli/ images: ["/platform/cli.png"] date: "2024-08-23" # updated: "" # When the content was last entirely checked @@ -18,10 +18,10 @@ date: "2024-08-23" The Viam CLI (command line interface) tool enables you to manage your machines and {{< glossary_tooltip term_id="modular-resource" text="modular resources" >}} across organizations and locations from the command line. The CLI lets you: -- Retrieve [organization](/cloud/organizations/) and location information -- Manage [machine fleet](/fleet/) data and logs +- Retrieve [organization](/dev/reference/glossary/#organization) and location information +- Manage fleet data and logs - Control machines by issuing component and service commands -- Upload and manage [modular resources](/registry/) in the Viam Registry +- Upload and manage modular resources in the [Viam Registry](https://app.viam.com/registry/) For example, this CLI command moves a servo to the 75 degree position: @@ -153,7 +153,7 @@ You will need both to authenticate. {{% alert title="Important" color="note" %}} Keep these key values safe. By default, new organization API keys are created with **Owner** permissions, giving the key full read and write access to all machines within your organization. -You can change an API key's permissions from the Viam app on the [organizations page](/cloud/organizations/) by clicking the **Show details** link next to your API key. +You can change an API key's permissions from the Viam app on the [organizations page](/manage/reference/organize/) by clicking the **Show details** link next to your API key. {{% /alert %}} Once created, you can use the organization API key to authenticate future CLI sessions or to [use the SDKs](/sdks/#authentication). @@ -192,7 +192,7 @@ You will need both to authenticate. {{% alert title="Important" color="note" %}} Keep these key values safe. 
By default, new location API keys are created with **Owner** permissions, giving the key full read and write access to all machines within your location. -You can change an API key's permissions from the Viam app on the [organizations page](/cloud/organizations/) by clicking the **Show details** link next to your API key. +You can change an API key's permissions from the Viam app on the [organizations page](/manage/reference/organize/) by clicking the **Show details** link next to your API key. {{% /alert %}} Once created, you can use the location API key to authenticate future CLI sessions or to [connect to machines with the SDK](/sdks/#authentication). @@ -395,7 +395,7 @@ Its **File ID** is shown under the **DETAILS** subtab that appears on the right. You cannot use filter arguments, such as `--start` or `--end` when using `ids`. -See [Datasets](/fleet/dataset/) for more information. +See [Create a dataset](/data-ai/ai/create-dataset/) for more information. ##### Using the `filter` argument @@ -424,7 +424,7 @@ Removing the `viam data export` string, you can use the same filter parameters ( You cannot use the `--file-ids` argument when using `filter`. -See [Datasets](/fleet/dataset/) for more information. +See [Create a dataset](/data-ai/ai/create-dataset/) for more information. ### `data` @@ -493,8 +493,8 @@ done | -------------- | ----------- | -------------------- | | `export` | Export data in a specified format to a specified location. | - | | `tag` | Add or remove tags from data matching the ids or filter. | `ids`, `filter` | -| `database configure` | Create a new database user for the Viam organization's MongoDB Atlas Data Federation instance, or change the password of an existing user. See [Configure data query](/how-tos/sensor-data-query-with-third-party-tools/#configure-data-query). | - | -| `database hostname` | Get the MongoDB Atlas Data Federation instance hostname and connection URI. 
See [Configure data query](/how-tos/sensor-data-query-with-third-party-tools/#configure-data-query). | - | +| `database configure` | Create a new database user for the Viam organization's MongoDB Atlas Data Federation instance, or change the password of an existing user. See [Configure data query](/data-ai/data/query/#configure-data-query). | - | +| `database hostname` | Get the MongoDB Atlas Data Federation instance hostname and connection URI. See [Configure data query](/data-ai/data/query/#configure-data-query). | - | | `delete binary` | Delete binary data from the Viam Cloud. | - | | `delete tabular` | Delete tabular data from the Viam Cloud. | - | | `--help` | Return help | - | @@ -541,7 +541,7 @@ done ### `locations` -The `locations` command allows you to manage the [locations](/cloud/locations/) that you have access to. +The `locations` command allows you to manage the [locations](/manage/reference/organize/) that you have access to. With it, you can list available locations, filter locations by organization, or create a new location API key. ```sh {class="command-line" data-prompt="$"} @@ -628,10 +628,10 @@ This includes: - Building your module for different architectures using cloud runners - Building a module locally and running it on a target device. Rebuilding & restarting if already running. -See [Upload a module](/how-tos/upload-module/) and [Update an existing module](/how-tos/manage-modules/#update-an-existing-module) for more information. +See [Update and manage modules you created](/operate/get-started/other-hardware/manage-modules/) for more information. If you update and release your module as part of a continuous integration (CI) workflow, you can also -[automatically upload new versions of your module on release](/how-tos/manage-modules/#update-an-existing-module-using-a-github-action) using a GitHub Action. 
+[automatically upload new versions of your module on release](/operate/get-started/other-hardware/manage-modules/#update-an-existing-module-using-a-github-action) using a GitHub Action. ```sh {class="command-line" data-prompt="$"} viam module generate @@ -716,7 +716,7 @@ viam module upload --version=1.0.0 --platform=darwin/arm64 packaged-module.tar.g | `--local-only` | Create a meta.json file for local use, but don't create the module on the backend (default: `false`). | `create` | Optional | | `--name` | The name of the custom module to be created | `create` | **Required** | | `--org-id` | The organization ID to associate the module to. See [Using the `--org-id` argument](#using-the---org-id-and---public-namespace-arguments) | `create`, `upload` | **Required** | -| `--public-namespace` | The [namespace](/cloud/organizations/#create-a-namespace-for-your-organization) to associate the module to. See [Using the `--public-namespace` argument](#using-the---org-id-and---public-namespace-arguments) | `create`, `upload` | **Required** | +| `--public-namespace` | The namespace to associate the module to. See [Using the `--public-namespace` argument](#using-the---org-id-and---public-namespace-arguments) | `create`, `upload` | **Required** | | `--platform` | The architecture of your module binary. See [Using the `--platform` argument](#using-the---platform-argument) | `upload`, `build logs` | **Required** | | `--tags` | Comma-separated list of platform tags that determine to which platforms this binary can be deployed. Examples: `distro:debian,distro:ubuntu, os_version:22.04,os_codename:jammy`. For a machine to use an uploaded binary, all tags must be satisfied as well as the `--platform` field.
  • `distro`: Distribution. You can find this in `/etc/os-release`. `"debian"` or `"ubuntu"`.
  • `os_version`: Operating System version. On Linux, you can find this in `/etc/os-release`. Example for linux: `22.04`. On Mac, run `sw_vers --productVersion` and use the major version only. Example for mac: `14`.
  • `codename`: The operating system codename. Find this in `/etc/os-release`. For example: `"bullseye"`, `"bookworm"`, or `"jammy"`.
  • `cuda`: Whether using CUDA compiler. Run `nvcc --version`. For example: `"true"`.
  • `cuda_version`: The CUDA compiler version. Run `nvcc --version`. For example: `"11"` or `"12"`.
  • `jetpack`: Version of the NVIDIA JetPack SDK. Run `apt-cache show nvidia-jetpack`. For example: `"5"`.
  • `pi`: Version of the raspberry pi: `"4"` or `"5"`.
  • `pifull`: Compute module or model number, for example `cm5p` or `5B`.
| `upload` | Optional | | `--version` | The version of your module to set for this upload. See [Using the `--version` argument](#using-the---version-argument) | `upload` | **Required** | @@ -726,7 +726,7 @@ viam module upload --version=1.0.0 --platform=darwin/arm64 packaged-module.tar.g All of the `module` commands accept either the `--org-id` or `--public-namespace` argument. -- Use the `--public-namespace` argument to supply the [namespace](/cloud/organizations/#create-a-namespace-for-your-organization) of your organization. This will upload your module to the Viam Registry and share it with other users. +- Use the `--public-namespace` argument to supply the namespace of your organization. This will upload your module to the Viam Registry and share it with other users. - Use the `--org-id` to provide your organization ID instead, This will upload your module privately within your organization. You may use either argument for the `viam module create` command, but must use `--public-namespace` for the `update` and `upload` commands when uploading as a public module (`"visibility": "public"`) to the Viam Registry. @@ -803,14 +803,14 @@ The `meta.json` file includes the following configuration options: module_id string Required - The name of the module, including its namespace. + The name of the module, including its namespace. visibility string Required - Whether the module is accessible only to members of your organization (private), or visible to all Viam users (public). You can change this setting later using the viam module update command.

Default: private + Whether the module is accessible only to members of your organization (private), or visible to all Viam users (public). You can change this setting later using the viam module update command.

Default: private url @@ -858,14 +858,12 @@ For example, the following represents the configuration of an example `my-module ``` {{% alert title="Important" color="note" %}} -If you are publishing a public module (`"visibility": "public"`), the [namespace of your model](/how-tos/create-module/#name-your-new-resource-model) must match the [namespace of your organization](/cloud/organizations/#create-a-namespace-for-your-organization). +If you are publishing a public module (`"visibility": "public"`), the namespace of your model must match the namespace of your [organization](/dev/reference/glossary/#organization). In the example above, the model namespace is set to `acme` to match the owning organization's namespace. If the two namespaces do not match, the command will return an error. {{% /alert %}} -See [Upload a module](/how-tos/upload-module/) and [Update an existing module](/how-tos/manage-modules/#update-an-existing-module) for a detailed walkthrough of the `viam module` commands. - -See [Modular resources](/registry/) for a conceptual overview of modules and the modular resource system at Viam. +See [Update and manage modules you created](/operate/get-started/other-hardware/manage-modules/) for a detailed walkthrough of the `viam module` commands. ##### Using the `build` subcommand @@ -1258,7 +1256,7 @@ The `--stream` argument, when included in the CLI command prior to the `--data` ### `training-script` -Manage training scripts for [custom ML training](/registry/training-scripts/). +Manage training scripts for [custom ML training](/data-ai/ai/train/). 
```sh {class="command-line" data-prompt="$"} viam training-script upload --framework= --org-id= --path= --script-name= --type= @@ -1404,8 +1402,8 @@ viam whoami ### `auth-app` -The `auth-app` command allows you to register, update, and get your web or mobile application (created with the Viam Flutter or TypeScript [SDKs](/sdks/)) with [FusionAuth](https://fusionauth.io/) (the tool Viam uses for authentication and authorization) so that you or other users can log into your app with the same credentials they use to log into the [Viam app](https://app.viam.com). -The user's credentials allow them the same [permissions](/cloud/rbac/) to organizations, locations, and machines that they have in the Viam app. +The `auth-app` command allows you to register, update, and get your web or mobile application (created with the Viam Flutter or TypeScript [SDKs](/dev/reference/sdks/)) with [FusionAuth](https://fusionauth.io/) (the tool Viam uses for authentication and authorization) so that you or other users can log into your app with the same credentials they use to log into the [Viam app](https://app.viam.com). +The user's credentials allow them the same [permissions](/manage/manage/rbac/) to organizations, locations, and machines that they have in the Viam app. 
```sh {class="command-line" data-prompt="$" data-output="2-8,10-14"} viam auth-app register --org-id= --application-name= --origin-uris= --redirect-uris= --logout-uri= diff --git a/docs/appendix/troubleshooting.md b/docs/dev/tools/common-errors.md similarity index 83% rename from docs/appendix/troubleshooting.md rename to docs/dev/tools/common-errors.md index ad567fdcfc..8e28a40707 100644 --- a/docs/appendix/troubleshooting.md +++ b/docs/dev/tools/common-errors.md @@ -1,16 +1,19 @@ --- -title: "Troubleshooting" -linkTitle: "Troubleshooting" -weight: 40 +title: "Common Errors & Known Issues" +linkTitle: "Common Errors" +weight: 50 type: "docs" description: "A guide to troubleshooting a Viam-based machine or system of machines with fixes to common problems." date: "2022-01-01" +no_list: true # updated: "" # When the content was last entirely checked --- This document lists common errors encountered when working with `viam-server` and the [Viam app](https://app.viam.com), and provides simple steps to resolve them. While many common issues and their possible resolutions are presented here, this list is not comprehensive. +To view logs or get a remote shell on a machine see [Troubleshoot](/manage/troubleshoot/troubleshoot/). + If you have encountered an error that is not listed here, we'd love to hear from you on our [Community Discord](https://discord.gg/viam)! Please post the error message you received along with how you were able to trigger it and we'll see if we can help. @@ -18,43 +21,11 @@ Please post the error message you received along with how you were able to trigg For information on the status of [app.viam.com](https://app.viam.com), visit [status.viam.com](https://status.viam.com/). -## Enable debug level logs - -The default log level for `viam-server` and any running resources is `"Info"`. -If you are not seeing helpful logs, you can try changing the log level to `"Debug"`. 
- -{{< tabs >}} -{{% tab name="For individual resources" %}} - -Add the `log_configuration` option to the resource's JSON configuration: - -```json -"log_configuration": { - "level": "Debug" -}, -"attributes": { ... } -``` - -{{% /tab %}} -{{% tab name="For viam-server" %}} - -Add `"debug": true` to the machine's configuration: - -```json -{ - "debug": true, - "components": [{ ... }] -} -``` - -{{% /tab %}} -{{< /tabs >}} - ## Common installation errors ### The authenticity of host 'hostname.local' can't be established -**Description:** When following our [installation guides](/installation/viam-server-setup/), you will likely encounter this message the first time you try to make an `ssh` connection to your newly-imaged {{< glossary_tooltip term_id="board" text="board" >}}. +**Description:** When following our [installation guides](/operate/get-started/setup/), you will likely encounter this message the first time you try to make an `ssh` connection to your newly-imaged {{< glossary_tooltip term_id="board" text="board" >}}. This is expected: `ssh` is advising you that it has not yet connected to this address, and prompts you for how to proceed. **Solution:** The message will ask `Are you sure you want to continue connecting?`. @@ -72,8 +43,8 @@ This is only required for the first `ssh` connection you make to a newly-imaged - Your `ssh` connection string should resemble the following: `ssh username@hostname.local`. Be sure that you match hostname, username, and password exactly to what you initially configured when imaging your board. - If you are still unable to connect, restart your board and try your `ssh` connection again after a few minutes. -- If that fails, try re-imaging your board following the [installation guide](/installation/viam-server-setup/) appropriate for your board. 
- - If using the [Raspberry Pi installation guide](/installation/prepare/rpi-setup/), be sure to carefully enter the configuration details under the **Advanced Options** (gear icon) button on the [Raspberry Pi imager](https://www.raspberrypi.com/software/) before you re-image your board. +- If that fails, try re-imaging your board following the [installation guide](/operate/get-started/setup/#quickstart) appropriate for your board. + - If using the [Raspberry Pi installation guide](/operate/reference/prepare/rpi-setup/), be sure to carefully enter the configuration details under the **Advanced Options** (gear icon) button on the [Raspberry Pi imager](https://www.raspberrypi.com/software/) before you re-image your board. - If you re-imaged your board and provided a different hostname, you may need to accept the `ssh` host key again by typing `yes` when prompted. - If you re-imaged your board and provided the same hostname, you may see an error message similar to `WARNING: REMOTE HOST IDENTIFICATION HAS CHANGED!`. - If so, edit your `~/.ssh/known_hosts` file to delete any single lines that begin with the board hostname you specified (like `hostname.local` or similar). @@ -94,9 +65,9 @@ This is only required for the first `ssh` connection you make to a newly-imaged **Full Error:** `Something went wrong trying to read the squashfs image. Open dir error: No such file or directory` -**Description:** The `viam-server` [installation](/installation/viam-server-setup/) or [update](/installation/manage-viam-server/#update-viam-server) process may have been interrupted partway, with some files either partially-written or missing. +**Description:** The `viam-server` [installation](/operate/get-started/setup/) or [update](/operate/reference/viam-server/manage-viam-server/#update-viam-server) process may have been interrupted partway, with some files either partially-written or missing. 
-**Solution:** Reinstall `viam-server` following the [installation instructions](/installation/viam-server-setup/). +**Solution:** Reinstall `viam-server` following the [installation instructions](/operate/get-started/setup/). ### AppImages require FUSE to run @@ -164,7 +135,7 @@ When a machine is disconnected, it will continue to run with its locally-cached It should be listed as `active (running)`. - If it is listed as `stopped` or `failed`, you can try restarting it with `sudo systemctl start viam-server`. - - If the command returns the message `Unit viam-server.service could not be found`, be sure you have followed the [installation instructions for your board](/installation/viam-server-setup/#platform-requirements), and then followed the {{< glossary_tooltip term_id="setup" text="setup instructions" >}}. + - If the command returns the message `Unit viam-server.service could not be found`, be sure you have followed the [installation instructions for your board](/operate/get-started/setup/#quickstart), and then followed the {{< glossary_tooltip term_id="setup" text="setup instructions" >}}. - If none of the above succeed in getting `viam-server` up and running, check the logs on your board for any pertinent error messages. Depending on your board's specific Linux OS, you might use a command similar to the following to show the 50 most recent log messages from `viam-server`. Run this command from within an `ssh` session to the board: @@ -176,7 +147,7 @@ When a machine is disconnected, it will continue to run with its locally-cached **Full Error:** `Error: cannot parse config: JSON: cannot unmarshal string into Go struct field Component.components.frame of type float64.` -**Description:** A [frame](/services/frame-system/) attribute may be malformed, and is preventing the parsing of the component's configuration. 
+**Description:** A [frame](/operate/mobility/define-geometry/) attribute may be malformed, and is preventing the parsing of the component's configuration. **Solution:** Check the **CONFIGURE** tab for your machine in the [Viam app](https://app.viam.com) and look for a `frame` attribute, either in **Frame** or **JSON** mode. If you see a `frame` attribute that you didn't create yourself, delete the whole `frame` object from the JSON config. diff --git a/docs/dev/tools/tutorials.md b/docs/dev/tools/tutorials.md new file mode 100644 index 0000000000..c9184f96e2 --- /dev/null +++ b/docs/dev/tools/tutorials.md @@ -0,0 +1,30 @@ +--- +title: "Tutorials" +linkTitle: "Tutorials" +weight: 30 +type: docs +layout: "tutorials" +videos: + [ + "/tutorials/videos/scuttle-gamepad-preview.webm", + "/tutorials/videos/scuttle-gamepad-preview.mp4", + ] +videoAlt: "Drive a Scuttle robot with a Bluetooth gamepad." +images: + [ + "/tutorials/videos/scuttle-gamepad-preview.gif", + "/tutorials/try-viam-sdk/image1.gif", + ] +description: "Build a machine yourself by following along with a tutorial." 
+no_list: true +hide_children: true +sitemap: + priority: 1.0 +aliases: + - /build/ +outputs: + - rss + - html +date: "2024-10-20" +# updated: "" # When the content was last entirely checked +--- diff --git a/docs/fleet/_index.md b/docs/fleet/_index.md deleted file mode 100644 index f18f453dd1..0000000000 --- a/docs/fleet/_index.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Fleet Tools" -linkTitle: "Fleet Tools" -weight: 430 -type: "docs" -layout: "empty" -canonical: "/platform/" -aliases: - - "/manage/fleet-management" - - "/manage/app-usage" - - "/product-overviews/fleet-management/" - - "/fleet/" - - /manage/fleet/ - - /manage/ -menuindent: true -empty_node: true ---- diff --git a/docs/fleet/control.md b/docs/fleet/control.md deleted file mode 100644 index 06d5906bdf..0000000000 --- a/docs/fleet/control.md +++ /dev/null @@ -1,142 +0,0 @@ ---- -title: "Machine Control Interface" -linkTitle: "Control Interface" -weight: 30 -type: "docs" -description: "Use the Viam app control tab or the Viam mobile app to monitor and remotely operate your machines." -tags: ["fleet management", "control", "app"] -images: ["/components/base/cropped-control.png"] -date: "2024-10-22" -# updated: "" # When the content was last entirely checked ---- - -Once you have [configured components and services](/configure/) for your machine, you can test, monitor, and remotely operate them from the **CONTROL** tab in the [Viam app](https://app.viam.com) or the [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app). - -## Control interface in the Viam app - -The **CONTROL** tab in the [Viam app](https://app.viam.com) gives you the ability to test, monitor, and operate the machines in your fleet. -The **CONTROL** tab provides a control interface for each component and service that you have configured for you machine. 
- -For example, if you have configured a base with wheels, you can move your machine's with an arrow pad and control the base's speed by setting its power with a slider. -If you have configured a camera component, a window in the **CONTROL** tab displays the camera output. - -If you use remote control in the [Viam app](https://app.viam.com) UI, all communication to the machine uses [WebRTC](https://pkg.go.dev/go.viam.com/utils@v0.0.3/rpc#hdr-Connection). -For local communication between [parts](/architecture/parts/#machine-parts) Viam uses gRPC or WebRTC. - -{{}} - -You can also switch between different machine parts directly from the **CONTROL** tab and control the selected machine part. - -For more information on configuring and controlling machine parts, see [Machine Architecture](/architecture/parts/#machine-parts). - -### Components - -For more detailed information on how to operate and test your resources, expand the relevant resource below: - -{{% expand "Arm" %}} -{{< readfile "/static/include/components/test-control/arm-control.md" >}} -{{% /expand%}} - -{{% expand "Base" %}} -{{< readfile "/static/include/components/test-control/base-control.md" >}} -{{% /expand%}} - -{{% expand "Board" %}} - -## Test `analogs` - -{{< readfile "/static/include/components/board/test-board-analogs.md" >}} - -## Test `digital_interrupts` - -{{< readfile "/static/include/components/board/test-board-digital-interrupts.md" >}} -{{% /expand%}} - -{{% expand "Camera" %}} -{{< readfile "/static/include/components/camera-view-camera-stream.md" >}} -{{% /expand%}} - -{{% expand "Encoder" %}} -{{< readfile "/static/include/components/test-control/encoder-control.md" >}} -{{% /expand%}} - -{{% expand "Gantry" %}} -{{< readfile "/static/include/components/test-control/gantry-control.md" >}} -{{% /expand%}} - -{{% expand "Generic component" %}} -{{< readfile "/static/include/components/test-control/generic-control.md" >}} -{{% /expand%}} - -{{% expand "Gripper" %}} -{{< readfile 
"/static/include/components/test-control/gripper-control.md" >}} -{{% /expand%}} - -{{% expand "Input controller" %}} -{{< readfile "/static/include/components/test-control/input-controller-control.md" >}} -{{% /expand%}} - -{{% expand "Motor" %}} -{{< readfile "/static/include/components/test-control/motor-control.md" >}} -{{% /expand%}} - -{{% expand "Movement sensor (GPS)" %}} -{{< readfile "/static/include/components/test-control/movement-sensor-gps-control.md" >}} -{{% /expand%}} - -{{% expand "Movement sensor (IMU)" %}} -{{< readfile "/static/include/components/test-control/movement-sensor-imu-control.md" >}} -{{% /expand%}} - -{{% expand "Power sensor" %}} -{{< readfile "/static/include/components/test-control/power-sensor-control.md" >}} -{{% /expand%}} - -{{% expand "Sensor" %}} - -## Test the sensor - -{{< readfile "/static/include/components/test-control/sensor-control.md" >}} - -{{% /expand%}} - -{{% expand "Servo" %}} -{{< readfile "/static/include/components/test-control/servo-control.md" >}} -{{% /expand%}} - -### Services - -The following services also provide control interfaces: - -- [SLAM](/services/slam/cartographer/#create-a-new-map): for creating a new SLAM map and for using the motion service to move a machine on a SLAM map -- [Navigation](/services/navigation/#control-tab-usage): for moving a machine to waypoints on a map - -## Control interface in the Viam mobile app - -{{}} - -In addition to the [Viam app](https://app.viam.com), the fully featured web application where you can access all fleet management tools, there is a Viam mobile app. - -The [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app) allows you to test, monitor and remotely operate machines in your fleet. -It provides a control interface for each component and service that you have configured for you machine. - -For example, you can view live camera feeds, adjust components' runtime parameters, and switch between controllable components. 
- -Additionally, the app allows you to: - -- see if your machines are online -- [view a machine's logs](/cloud/machines/#logs) -- [upload images from your phone to the cloud](/how-tos/upload-data/#upload-images-with-the-viam-mobile-app) -- [invite people to collaborate with you and modify access](/cloud/rbac/#use-the-mobile-app) - -
- -You can find the mobile app on the [App Store](https://apps.apple.com/vn/app/viam-robotics/id6451424162) and on [Google Play](https://play.google.com/store/apps/details?id=com.viam.viammobile&hl=en&gl=US). - - - apple store icon - - - - google play store icon - diff --git a/docs/fleet/data-management.md b/docs/fleet/data-management.md deleted file mode 100644 index 056e37dc9d..0000000000 --- a/docs/fleet/data-management.md +++ /dev/null @@ -1,106 +0,0 @@ ---- -title: "Data Management" -linkTitle: "Data Management" -weight: 40 -no_list: true -type: "docs" -tags: ["data management", "data", "services"] -description: "Capture data from machines, sync it to the cloud, and access it and train image classification and object detection models on the data." -aliases: - - /manage/data-management/ - - /services/data-management/ - - /manage/data/ - - "/data-management/" - - "/data-management/" - - "/services/data/" - - "/data/" - - /manage/data/export/ - - /data/export/ - - /services/data/export/ - - /manage/data/view/ - - /data/view/ - - /services/data/view/ -icon: true -images: ["/services/icons/data-management.svg"] -no_service: true -date: "2022-01-01" -# updated: "" # When the content was last entirely checked ---- - -The [data management service](/services/data/) allows you to reliably capture and sync data to the cloud where you can query data from all your machines. -You can collect data from your robots, IoT devices, or any other machines, and sync all the data to one place in the cloud without needing to manually gather data from each machine. - -{{}} - -## Cloud data management - -
-{{}} -

- -Once your data is synced to the cloud, you can view, filter, and label data, and assign data to datasets, from your [Viam app **DATA** page](https://app.viam.com/data/view). -You can also interact with your data using the [Viam CLI](/cli/#data), or using the [data client API](/appendix/apis/data-client/). - - - -{{< cards >}} -{{% manualcard title="Create datasets" link="/fleet/dataset/" %}} - -Label data for management and machine learning, with dynamic datasets that change with underlying data modifications. - -{{% /manualcard %}} -{{% manualcard title="Export data" link="/how-tos/export-data/" %}} - -Export data with the Viam CLI and download your data for offline access. - -{{% /manualcard %}} -{{% manualcard title="Upload a batch of data" link="/how-tos/upload-data/" %}} - -Upload data to the Viam Cloud from your computer or mobile device using the data client API, the Viam CLI, or the Viam mobile app. - -{{% /manualcard %}} -{{< /cards >}} - -### Query your data - -Once your data has [synced](/services/data/), you can query it using the [data client API](/appendix/apis/data-client/). -For _tabular_ sensor data, you can also run {{< glossary_tooltip term_id="sql" text="SQL" >}} or {{< glossary_tooltip term_id="mql" text="MQL" >}} queries from the [Query subtab](https://app.viam.com/data/query) of the **Data** tab in the Viam app. - -{{< cards >}} -{{% card link="/how-tos/sensor-data-query-with-third-party-tools/" noimage="True" %}} -{{% card link="/appendix/apis/data-client/" noimage="True" %}} -{{< /cards >}} - -### Permissions - -Data management permissions vary between owners and operators. -For more information about who can do what with data, see [Data Permissions](/cloud/rbac/#data-and-machine-learning). - -## API - -The [data client API](/appendix/apis/data-client/) supports the following methods: - -
- -Methods to upload data like images or sensor readings directly to the Viam Cloud: - -{{< readfile "/static/include/app/apis/generated/data_sync-table.md" >}} - -
- -Methods to download, filter, tag, or perform other tasks on data like images or sensor readings: - -{{< readfile "/static/include/app/apis/generated/data-table.md" >}} - -
- -Methods to work with datasets: - -{{< readfile "/static/include/app/apis/generated/dataset-table.md" >}} - -
- -The data management API supports a separate set of methods that allow you to sync data to the Viam app. -For information about that API, see [Data Management API](/appendix/apis/services/data/). - -For the command line interface `data` command, see [CLI](/cli/#data). diff --git a/docs/fleet/dataset.md b/docs/fleet/dataset.md deleted file mode 100644 index 863897d854..0000000000 --- a/docs/fleet/dataset.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -title: "Datasets" -linkTitle: "Datasets" -description: "Label data and create datasets for managing data and creating machine learning models." -weight: 50 -type: "docs" -tags: ["data management", "cloud", "sync"] -imageAlt: "Label data and create datasets" -images: ["/services/data/label-dog.gif"] -videos: ["/services/data/label-dog.webm", "/services/data/label-dog.mp4"] -videoAlt: "Add a bounding box around the dog in an image." -aliases: - - /manage/data/label/ - - /manage/data/dataset/ - - /data/dataset/ -no_service: true -date: "2022-01-01" -# updated: "" # When the content was last entirely checked -# SME: Tahiya Salam ---- - -A dataset is a grouping of images that you use to train machine learning models. -You can create and manage datasets using the [**DATA** tab](https://app.viam.com/data/view) in the Viam app, using the [data client API](/appendix/apis/data-client/), or using the [CLI `dataset` command](/cli/#dataset). - -{{< alert title="Info" color="info" >}} -Filtered datasets are views and not materialized. -That means the data you are viewing may change as you label and train on the dataset. - -Your dataset is also not versioned. If you train [ML models](/registry/ml-models/) on your dataset and the dataset changes existing models will not be affected but any new models you train will use the dataset with the data in it at the time of training. 
-{{< /alert >}} - -## Labels - -You label the images in your dataset with bounding boxes or image tags, depending on the type of model you intend to train: - -- **Bounding boxes** are used to train [object detection models](/services/vision/#detections). -- **Image tags** are used to train [image classification models](/services/vision/#classifications). - Tag names support alphanumeric characters, underscores, and hyphens. - -## API - -To interact with datasets programmatically, use the [data client API](/appendix/apis/data-client/), which supports the following methods for working with datasets: - -{{< readfile "/static/include/app/apis/generated/dataset-table.md" >}} - -## How-to guide - -The following how-to guide contains instructions on creating datasets as well as on how to train a model on a dataset: - -{{< cards >}} -{{% card link="/how-tos/train-deploy-ml/" %}} -{{< /cards >}} diff --git a/docs/fleet/provision.md b/docs/fleet/provision.md deleted file mode 100644 index 1287a4ba3f..0000000000 --- a/docs/fleet/provision.md +++ /dev/null @@ -1,259 +0,0 @@ ---- -title: "Provision machines using viam-agent" -linkTitle: "Provisioning Machines" -weight: 20 -type: "docs" -description: "Provision a machine as it first comes online with a pre-defined configuration - in the factory or when the machine is taken into service." -images: ["/platform/provisioning-demo.gif"] -videos: ["/platform/provisioning-demo.webm", "/platform/provisioning-demo.mp4"] -tags: ["fleet management", "viam-server", "viam-agent"] -# SMEs: James, Ale -aliases: - - "/build/provision/" - - "/fleet/provision/" -date: "2024-08-16" -# updated: "" # When the content was last entirely checked ---- - -You can use Viam's software provisioning manager (`agent-provisioning`), to provision a machine as it first comes online with a pre-defined configuration. 
-This is useful when deploying a fleet of machines directly from the factory to a customer, or when bundling proprietary software on your Viam machine. - -Provisioning is a feature of [`viam-agent`](/configure/agent/), which you can install as part of your manufacturing process. -`agent-provisioning` will then perform the rest of the first-time setup for your machine once an [end user sets up the machine](#end-user-experience). - -Consider a company that sells machines that monitor weather conditions on a maritime craft and provide navigation advice based on those readings. -Such a machine might use Viam to regularly capture and upload a stream of sensor readings, for example. -To parse the readings and provide tailored guidance to a ship's captain, the company writes their own proprietary application which includes live analytics and speech generation for conveying advice to the captain. - -Using `agent-provisioning`, this company can ship their machines directly to customers with `viam-agent` installed. -When a customer sets up their machine, `viam-agent` installs `viam-server`. -By having the end customer set up the machine, the company: - -- eliminates per-device setup and individualization at the factory -- allows for tailored configurations per customer as needed -- allows customer to provide their own WiFi credentials - -{{< alert title="Support Notice" color="note" >}} - -Provisioning is supported and tested only on Debian 11 (Bullseye), and 12 (Bookworm) but should work on most distros using NetworkManager v1.42 (or newer) as well. -For Bullseye, the installation of `viam-agent` changes the network configuration to use NetworkManager. - -{{< /alert >}} - -For a guide on how to configure provisioning for your machine, see: - -{{< cards >}} -{{% card link="/how-tos/provision-setup/" %}} -{{< /cards >}} - -## End user experience - -End users receive a machine, and use either a captive web portal or mobile app to complete the machine setup. 
- -One option is to use the [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app). -The Viam mobile app allows end users to create a new machine in the app, and `agent-provisioning` will then install `viam-server` and run it with a provided configuration. - -To add your branding, you can build your own mobile app and use the [Flutter SDK](https://flutter.viam.dev/viam_protos.provisioning.provisioning/ProvisioningServiceClient-class.html) or the [TypeScript SDK](https://github.com/viamrobotics/viam-typescript-sdk/blob/main/src/app/provisioning-client.ts) to connect to `viam-agent` and provision your machines. - -If you are not using Flutter or TypeScript and would like to use provisioning, please [contact us](mailto:support@viam.com). - -For an end-user guide to setting up their machine, see: - -{{< cards >}} -{{% card link="/how-tos/provision/" %}} -{{< /cards >}} - -This is the general process for provisioning depending on whether you are using a captive web portal or a mobile app: - -{{< tabs >}} -{{% tab name="Mobile app" min-height="703px" %}} - -{{