import json
import os

import streamlit as st
import google.generativeai as gen_ai
from dotenv import load_dotenv

# Load environment variables (expects GOOGLE_API_KEY in a .env file).
load_dotenv()

# Configure Streamlit page settings.
st.set_page_config(
    page_title="OpenBot Chat",
    page_icon="🔍",
    layout="centered",
)

# Set up the Generative AI model.
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
gen_ai.configure(api_key=GOOGLE_API_KEY)
model = gen_ai.GenerativeModel('gemini-pro')


@st.cache_resource
def load_preprocessed_summaries():
    """Load preprocessed README summaries from summarized_readmes.json.

    Returns:
        dict: mapping of README URL -> summary text. Returns an empty
        dict (after surfacing an error in the UI) if the file is
        missing or malformed, so the app degrades instead of crashing.
    """
    try:
        with open('summarized_readmes.json', 'r') as f:
            return json.load(f)
    except Exception as e:
        st.error(f"Error loading preprocessed summaries: {e}")
        return {}


# Load the summarized content once (cached across reruns by Streamlit).
summarized_readme_contents = load_preprocessed_summaries()

# Combine all summarized content into one string for prompting.
combined_summary_content = "\n\n---\n\n".join(
    f"Summary from {url}:\n{summary}"
    for url, summary in summarized_readme_contents.items()
)

# Initialize session state for chat history if not already present.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# CSS styling for the chat UI.
# NOTE(review): the original CSS rules were lost in extraction (the
# markdown call body is empty in the source) — restore the stylesheet
# for .chat-bubble / .user-bubble / .bot-bubble here.
st.markdown("""
    """, unsafe_allow_html=True)

# Title of the Streamlit app.
st.title("🔍 OpenBot Chat")

# User input area in the form.
with st.form(key="user_input_form"):
    user_input = st.text_input(
        "Ask a question about OpenBot",
        placeholder="e.g., What is OpenBot?",
        key="user_input"
    )
    submit_button = st.form_submit_button("Ask")


def contains_source_link(response_text):
    """Return True if *response_text* cites a source URL.

    BUG FIX: the original expression

        "Source:" in response_text and "http" or "github.com" in response_text

    parsed as ("Source:" in t and "http") or ("github.com" in t).
    The bare string "http" is always truthy, so any response that
    merely contained "Source:" was accepted with no link check at
    all. The corrected form requires "Source:" plus an actual URL
    indicator.
    """
    return "Source:" in response_text and (
        "http" in response_text or "github.com" in response_text
    )


# Process user input and generate a response.
if submit_button and user_input:
    # Guard: nothing to answer from if the summaries failed to load.
    if not combined_summary_content:
        st.error("Could not load summarized README contents.")
        st.stop()

    # Save user input to the chat history.
    st.session_state.chat_history.append(("user", user_input))

    # Split the summarized content into chunks to stay within the
    # model's token limits.
    CHUNK_SIZE = 15000  # adjust as needed to fit within token limits
    readme_chunks = [
        combined_summary_content[i:i + CHUNK_SIZE]
        for i in range(0, len(combined_summary_content), CHUNK_SIZE)
    ]

    responses = []
    for chunk in readme_chunks:
        contextual_prompt = f"""Based on the following summarized README content chunk, please provide a detailed answer to the question. If the information comes from a specific README, include that source in your response:

{chunk}

Question: {user_input}

Please provide a comprehensive answer and cite which README file(s) the information comes from.
"""
        try:
            # Each chunk is answered in a fresh chat so chunks do not
            # contaminate each other's context.
            response = model.start_chat(history=[]).send_message(contextual_prompt)
            responses.append(response.text)
        except Exception as e:
            st.error(f"Error generating response for a chunk: {e}")
            continue

    # Keep only responses that actually cite a source.
    valid_responses = [resp for resp in responses if contains_source_link(resp)]

    # If at least one response has a source, use it; otherwise fall back.
    if valid_responses:
        final_response = "\n\n---\n\n".join(valid_responses)
    else:
        final_response = "I could not find a direct answer. Try to search differently!"

    # Add the final response to chat history.
    st.session_state.chat_history.append(("assistant", final_response))

# Display chat history (user and assistant messages).
# NOTE(review): the original <div> markup was destroyed in extraction;
# reconstructed as simple chat bubbles — confirm class names against
# the intended CSS above.
for role, message in st.session_state.chat_history:
    if role == "user":
        st.markdown(f"""
<div class="chat-bubble user-bubble">
    <b>You:</b><br>{message}
</div>
""", unsafe_allow_html=True)
    else:
        st.markdown(f"""
<div class="chat-bubble bot-bubble">
    <b>OpenBot:</b><br>{message}
</div>
""", unsafe_allow_html=True)
import os

import requests
from dotenv import load_dotenv
import google.generativeai as gen_ai
from sqlalchemy import create_engine, text

# Load environment variables (expects GOOGLE_API_KEY and DATABASE_URL).
load_dotenv()

GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
gen_ai.configure(api_key=GOOGLE_API_KEY)
model = gen_ai.GenerativeModel('gemini-pro')


def fetch_readme_content(raw_url):
    """Fetch raw README text from *raw_url*.

    Returns the response body on HTTP 200, otherwise None. Network
    errors are deliberately swallowed (best-effort batch job), but a
    timeout is set so a dead host cannot hang the whole run.
    """
    try:
        response = requests.get(raw_url, timeout=30)
        if response.status_code != 200:
            return None
        return response.text
    except requests.RequestException:
        return None


def summarize_readme(content):
    """Ask the Gemini model for a brief summary of *content*.

    Returns the summary text, or None if the API call fails.
    """
    summary_prompt = f"Summarize the following README content briefly:\n\n{content}"
    try:
        summary_response = model.start_chat(history=[]).send_message(summary_prompt)
        return summary_response.text
    except Exception:
        return None


def store_summary_in_db(summary, url):
    """Insert one (url, summary) row into the readme_summaries table.

    BUG FIX: the original passed a raw SQL string with a %s parameter
    tuple straight to Connection.execute(). SQLAlchemy 1.4+/2.0
    rejects raw strings (they must be wrapped in text()) and does not
    accept that positional-tuple binding style; it also never
    committed, so the insert would be rolled back when the connection
    closed. engine.begin() opens a transaction that commits on
    successful exit, and text() with named bind parameters is the
    supported (and injection-safe) form.
    """
    engine = create_engine(os.getenv("DATABASE_URL"))
    with engine.begin() as connection:
        connection.execute(
            text("INSERT INTO readme_summaries (url, summary) VALUES (:url, :summary)"),
            {"url": url, "summary": summary},
        )


# Map of display URL (GitHub blob page) -> raw content URL to fetch.
README_URLS = {
    "https://github.com/isl-org/OpenBot/blob/master/README.md":
        "https://raw.githubusercontent.com/isl-org/OpenBot/master/README.md",
    "https://github.com/isl-org/OpenBot/blob/master/android/README.md":
        "https://raw.githubusercontent.com/isl-org/OpenBot/master/android/README.md",
    "https://github.com/isl-org/OpenBot/blob/master/android/controller/README.md":
        "https://raw.githubusercontent.com/isl-org/OpenBot/master/android/controller/README.md",
    "https://github.com/isl-org/OpenBot/blob/master/android/robot/README.md":
        "https://raw.githubusercontent.com/isl-org/OpenBot/master/android/robot/README.md",
    "https://github.com/isl-org/OpenBot/blob/master/android/robot/src/main/java/org/openbot/googleServices/README.md":
        "https://raw.githubusercontent.com/isl-org/OpenBot/master/android/robot/src/main/java/org/openbot/googleServices/README.md",
    "https://github.com/isl-org/OpenBot/blob/master/android/robot/ContributionGuide.md":
        "https://raw.githubusercontent.com/isl-org/OpenBot/master/android/robot/ContributionGuide.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/block_body/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/diy/cad/block_body/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/glue_body/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/diy/cad/glue_body/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/regular_body/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/diy/cad/regular_body/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/slim_body/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/diy/cad/slim_body/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/diy/pcb/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/diy/pcb/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/diy/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/diy/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/lite/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/lite/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/mtv/pcb/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/mtv/pcb/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/mtv/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/mtv/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/rc_truck/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/rc_truck/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/body/rtr/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/body/rtr/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/controller/flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/controller/flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/controller/flutter/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/controller/flutter/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/firmware/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/firmware/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/ios/OpenBot/OpenBot/Authentication/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/ios/OpenBot/OpenBot/Authentication/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/ios/OpenBot/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/ios/OpenBot/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/open-code/src/components/blockly/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/open-code/src/components/blockly/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/open-code/src/services/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/open-code/src/services/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/open-code/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/open-code/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/policy/frontend/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/policy/frontend/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/policy/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/policy/README.md",
    "https://github.com/ob-f/OpenBot/blob/master/python/README.md":
        "https://raw.githubusercontent.com/ob-f/OpenBot/master/python/README.md",
}


def main():
    """Fetch, summarize, and persist every configured README (best-effort)."""
    for display_url, raw_url in README_URLS.items():
        content = fetch_readme_content(raw_url)
        if content:
            summary = summarize_readme(content)
            if summary:
                store_summary_in_db(summary, raw_url)


if __name__ == "__main__":
    main()
+# Default: false unless (1) we are on a Linux box where DISPLAY is unset, or (2) server.liveSave is set. +headless = true + +# Immediately share the app in such a way that enables live monitoring, and post-run analysis. +# Default: false +liveSave = false + +# Automatically rerun script when the file is modified on disk. +# Default: false +runOnSave = false + +# The port where the server will listen for client and browser connections. +# Default: 8501 +port = 80 + +# Enables support for Cross-Origin Request Sharing, for added security. +# Default: true +enableCORS = false + +[browser] +# Internet address of the server that the browser should connect to. Can be IP address or DNS name. +# Default: 'localhost' +serverAddress = "0.0.0.0" + +# Whether to send usage statistics to Streamlit. +# Default: true +gatherUsageStats = true + +# Port that the browser should use to connect to the server when in liveSave mode. +# Default: whatever value is set in server.port. +serverPort = 80 + diff --git a/openbot-qna-main/streamlit/credentials.toml b/openbot-qna-main/streamlit/credentials.toml new file mode 100644 index 000000000..bf20fa1e0 --- /dev/null +++ b/openbot-qna-main/streamlit/credentials.toml @@ -0,0 +1,3 @@ +[general] +email="" + diff --git a/openbot-qna-main/summarized_readmes.json b/openbot-qna-main/summarized_readmes.json new file mode 100644 index 000000000..e200a6682 --- /dev/null +++ b/openbot-qna-main/summarized_readmes.json @@ -0,0 +1,31 @@ +{ + "https://github.com/isl-org/OpenBot/blob/master/README.md": "OpenBot empowers users to transform smartphones into robotic brains. It offers a cost-effective approach with a custom electric vehicle as the robotic body at approximately $50. 
The software stack for Android smartphones enables advanced robotics functionalities such as person tracking and real-time autonomous navigation.\n\nTo utilize OpenBot, users can refer to the provided guidelines: starting with the disclaimer, building the robot body, flashing the Arduino firmware, installing the Android apps, operating the robot via a controller, programming in the OpenBot Playground, and potentially training custom driving policies.\n\nThe source code can be obtained through various methods, including downloading a zip file, cloning the GitHub repository, or forking the repository for potential contributions.\n\nSeveral cool projects showcase the versatility of OpenBot, including tanks, 2WD vehicles, cardboard creations, and Baby Yoda-inspired robots.\n\nFor further assistance or collaboration, users can join the OpenBot community on Slack or contact the team via email. Contributions are welcome and guided by the provided guidelines. If users employ OpenBot in their research or development, they are encouraged to cite the associated paper.", + "https://github.com/isl-org/OpenBot/blob/master/android/README.md": "The provided README focuses on Android apps developed for OpenBot.\n\n**Features:**\nThe apps offer various features. Users can explore the functionalities of the Robot and Controller apps by following the provided links.\n\n**Installation:**\nTo install the apps, either scan the corresponding QR codes using a phone browser or download the APKs from the provided release assets or build artifacts.\n\n**Building the Apps:**\nPrerequisites for building the apps include Android Studio 2022.1.1, an Android device with API 21 or higher, and SDKs 33 (compile) and 32 (target). To build the apps, open the Android project in Android Studio, select the desired configuration, connect the device, and click Run.\n\n**Troubleshooting:**\nIn case of version compatibility issues, upgrade Android Studio or downgrade the gradle plugin. 
Consult the provided link for further information on compatibility.", + "https://github.com/isl-org/OpenBot/blob/master/android/controller/README.md": "The Controller App for OpenBot functions as a remote control for the OpenBot vehicle, similar to a PS3/4 or Xbox controller. Upon starting the app, it attempts to connect to the robot by placing it into **Phone** control mode. Connecting through the FreeRoamFragment is also possible. Once connected, the controller displays on-screen controls for driving the robot by tilting the phone or using sliders. Tilt mode allows for accelerometer-based steering and speed control through \"accelerator\" and \"brake\" pedals. Future development plans include displaying robot sensor information, video streaming, gyroscope control, and crash/bump event notifications from the robot.", + "https://github.com/isl-org/OpenBot/blob/master/android/robot/README.md": "**Robot App**\n\n**Disclaimer:**\n\n* Always operate in a safe environment and use at your own risk.\n* The app is under development and may encounter issues. Test before connecting wheels.\n\n**App Screens:**\n\n**Main Menu:**\n* Displays available screen options, including settings.\n\n**Settings Menu:**\n\n* USB connection: Adjust baud rate and connect/disconnect the device.\n* Permissions: Check and adjust app permissions.\n* Video streaming: Choose between WebRTC or RTSP streaming.\n* Bluetooth connection: Turn on BLE support, scan for nearby devices, and connect.\n\n**Free Roam:**\n* Real-time robot control with battery, speed, and distance information.\n* Drive states (Drive, Neutral, Reverse), steering, and speed controls.\n\n**Data Collection:**\n* Simple interface for collecting data sets.\n* Select web app for automatic data upload.\n* Adjust camera preview and model resolution for image capture. 
The mobile app provides various screens for controlling and interacting with a robotic vehicle:\n\n- **Data Collection**: Allows data collection for training machine learning models.\n- **Controller Mapping**: Displays controller button and joystick mapping.\n- **Robot Info**: Provides robot information, tests functionality, and allows for LED control and motor commands.\n- **Autopilot**: Runs trained autopilot models for autonomous navigation.\n- **Object Tracking**: Tracks objects from 80 classes, with adjustable speed based on object proximity.\n- **Point Goal Navigation**: Specifies a 2D goal for the robot to navigate to using AI policies.\n- **Model Management**: Lists and describes available machine learning models and benchmarks. This content provides a comprehensive benchmark for SSD object detectors on various smartphones. The benchmark includes seven models: MobileNetV1-300, MobileNetV3-320, YoloV4-tiny-224, YoloV4-tiny-416, YoloV4-224, YoloV5s-320, YoloV5s-640, YoloV5m-320, YoloV5l-320, and EfficientDet-L0-320, EfficientDet-L1-384, each with varying mAP (mean Average Precision) ranging from 16% to 40%. The benchmark tests these models on six different phones: Samsung S22 Ultra, Samsung S20FE 5G, Huawei P30 Pro, Google Pixel 6XL, Xiaomi Mi9, and Google Pixel 4XL, showcasing their performance in terms of frames per second (FPS) for CPU, GPU, and NNAPI (Neural Network API). The benchmark also provides insights into the impact of input resolution on model performance and includes notes on potential performance deterioration in landscape mode for certain models. 
**Model Performance (EfficientDet-L2-448)**\n\n- Model accuracy (mAP): 34%\n- Model performance deteriorates in landscape mode; confidence threshold adjustment may be necessary.\n\n**App Features**\n\n- **DefaultActivity:**\n - Displays vehicle connection status and sensor measurements.\n - Allows robot control via game controllers or external smartphone app.\n - Records data from various sensors (including camera, GPS, and vehicle sensors).\n - Integrates neural networks for person following and autonomous navigation.\n\n- **USB Connection:**\n - Establishes connection to the vehicle.\n\n- **Vehicle Status:**\n - Displays battery voltage, wheel speed, and obstacle distance.\n\n- **Control:**\n - Selects control mode (gamepad or phone), drive mode (gamepad controls), and speed mode (motor voltage limit).\n\n- **Data Log:**\n - Records sensor data and optionally images.\n - Four logging modes: only sensors, cropped images, preview images, or all images.\n\n- **Camera:**\n - Displays preview resolution and allows image rotation. 
**Camera**\n\n* The crop resolution (256x96 for \"AUTOPILOT_F\" model) is used as input to neural networks.\n* Toggle between rear and front camera.\n\n**Model**\n\n* **MobileNetV1-300**: Used for person following, comes with the app.\n* **CIL-Mobile**: Used for autonomous navigation, requires user training.\n* Additional models can be downloaded.\n* Toggle switch turns the network on/off.\n\n**Device**\n\n* Select execution device: CPU, GPU, or NNAPI.\n* Inference speed displayed for active models.\n\n**Projects Screen**\n\n* Displays OpenBot Playground projects if signed in with Google.\n* Scan project QR codes or execute projects.\n* Reload projects by pulling down on the screen.\n\n**Google Drive Projects**\n\n* Run projects from Google Drive by selecting them and clicking \"Start\".\n* Pop-up displays project name and start/cancel buttons.\n\n**QR Code Scanner**\n\n* Scan QR codes to run projects.\n* Grant camera access and wait for file retrieval.\n* Pop-up displays project name and start/cancel buttons.\n\n**Executing Project**\n\n* Displays code block names and a stop button.\n* Delete projects by swiping left on project cards. 
**App Functionality:**\n\n* **Project Management:** Deleting a project by long-pressing and confirming.\n\n**Profile Screen:**\n\n* If signed out: \"Google Sign-in\" button prompts for sign-in.\n* If signed in:\n * \"Edit Profile\" button allows for updating user information.\n * \"Logout\" button logs out the user.\n\n**OpenBot PlayGround Screen:**\n\n* Access to OpenBot Playground services via the toolbar icon.\n\n**Contribution:**\n\n* Refer to the Contribution Guide for adding custom fragments.\n\n**Code Structure:**\n\n* Based on Tensorflow Lite Object Detection Android Demo.\n* Main thread runs in DefaultActivity, which inherits camera and UI controls from CameraActivity.\n* SensorService handles phone sensor data logging.\n* ServerService and NsdService connect to a local Python server for data collection and model training.\n* env folder contains utility classes for vehicle, game, phone control, and audio feedback.\n* tflite folder defines models for Autopilot and Detector networks.\n\n**Optional Next Step:**\n\n* Train a custom Driving Policy.", + "https://github.com/isl-org/OpenBot/blob/master/android/robot/src/main/java/org/openbot/googleServices/README.md": "Firebase is a platform for mobile and web app development that provides a suite of services to streamline app building. It offers features like real-time database, user authentication, hosting, cloud storage, and more, all integrated into a single platform. By utilizing Firebase, developers can manage their backend infrastructure, leaving them free to focus on delivering exceptional user experiences.\n\nFor this specific Android application, Firebase is used for Google Sign-In authentication, which allows users to access the app using their Google credentials. This feature ensures secure and convenient access without requiring separate login credentials. 
Firebase manages the entire authentication process, including identity verification and providing unique user IDs.\n\nIntegrating Firebase into the app requires a Google account, a Firebase account, and a new Firebase project for the Android application. To set up the Firebase project, users should create a new project in the Firebase Console, disable Google Analytics if desired, and register a new Android app. The registration process includes providing the app's package name, nickname, and SHA-1 key, which is mandatory for Firebase Authentication. The SHA-1 key serves as a unique identifier for the app's signing certificate and is used by Firebase to verify the app's authenticity. **Firebase Setup Guide**\n\n**Keystore Management:**\n\n* Retrieve debug keystore details using the provided command for Mac or Windows. This requires specifying aliases, store, and key passwords.\n\n**Firebase Integration:**\n\n* Download and add the `google-services.json` file to both the app and assets directories.\n* Skip the third step of the setup wizard as Firebase SDK is already added in the gradle file.\n* Enable Google Sign-In authentication by following the steps provided in the Firebase Console.\n\n**SHA-1 Key Addition:**\n\n* Add the SHA-1 key to your Firebase project in the Console if it was not added during setup.\n* Navigate to Project Settings and under Your Apps, select the Android app.\n* Add the SHA-1 key in the SHA-1 certificate fingerprints section and save.\n\n**Firebase Configuration Update:**\n\n* If Firebase authentication was implemented before adding the SHA-1 key, replace the `google-services.json` file with the updated version from the Firebase Console.", + "https://github.com/isl-org/OpenBot/blob/master/android/robot/ContributionGuide.md": "**Contribution Guide**\n\nThe codebase follows the single activity architecture. The main activity launches the main fragment, which uses a RecyclerView to navigate to different screens using Navigation. 
The data for the RecyclerView is provided by `FeatureList.java`.\n\nTwo abstract classes are provided:\n\n* `ControlsFragment.java`: Provides functionality for controlling the robot via a controller. Inheriting classes can process controller key data accordingly.\n* `CameraFragment.java`: Integrates camera preview using CameraX API and extends `ControlsFragment.java` for robot connection and control.\n\nTo add a new feature:\n\n1. Create a fragment and layout file.\n2. Extend `CameraFragment.java` or `ControlsFragment.java` based on whether camera preview is required.\n3. Add the feature to `FeatureList.java` with its title, icon, and color.\n4. Add the feature to the navigation graph in `nav_graph.xml`.\n5. Add a case to navigate to the feature in `MainFragment.java`.", + "https://github.com/ob-f/OpenBot/blob/master/body/README.md": "**OpenBot Software**\n\nOpenBot provides a software platform for building and controlling wheeled robots. You can use either a ready-to-run (RTR) OpenBot or build your own using a microcontroller like Arduino Nano.\n\n**Building Options**\n\n* **DIY:** 3D-printed body for wheeled robots using low-cost hardware.\n* **Lite:** Simplified DIY version designed for education.\n* **RC Truck:** 3D-printed body for commercially available 1:16 RC trucks.\n* **Multi-Terrain Vehicle (MTV):** Blueprints for a versatile outdoor platform.\n\n**Other Features**\n\n* Detailed instructions and video guides for all build options.\n* Ready-to-use firmware for Arduino.", + "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/block_body/README.md": "The Blocky Body chassis provides additional height for electronics and a Lego-compatible top, featuring structural integrity comparable to the regular body. The bottom part and one of the top parts are required, which are available in various sizes to accommodate different print bed sizes and electronics volume. 
The recommended print settings include a layer height of 0.2mm, a wall line count of 3, 25% infill with a concentric pattern, and support with a 15% density. The body is designed to maintain bumpers and withstand the rigors of use, providing a versatile platform for robot building.", + "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/glue_body/README.md": "The \"Glueable Body\" folder contains files for a 3D-printed OpenBot body that has been split into four smaller pieces for printing on build plates as small as 150mmx140mm. The required parts are `glue_body_bottom_A`, `glue_body_bottom_B`, `glue_body_top_A`, and `glue_body_top_B`. Optional parts, such as `glue_connector_bottom`, `glue_connector_top_A`, and `glue_connector_top_B`, provide extra surface area for gluing, which can improve stability in the event of warping.", + "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/regular_body/README.md": "**OpenBot Regular Body Summary:**\n\nThe OpenBot assembly requires two printed parts: `body_bottom` and `body_top` (available in STL and STEP formats). These parts require a build plate size of at least 240mm x 150mm.\n\nFor optimal printing on an Ultimaker S5, the following settings are recommended:\n\n* Layer height: 0.2mm\n* Wall thickness: 1.5mm\n* Infill density: 20%\n* Infill pattern: Grid\n* Print speed: 80mm/s\n* No support\n\nThe chassis was successfully printed using PLA, ABS, and CPE materials. While print settings did not significantly impact print quality, slower printing and lower layer heights can improve the result. Support structures can enhance printing but require removal afterward.", + "https://github.com/ob-f/OpenBot/blob/master/body/diy/cad/slim_body/README.md": "The \"Slim Body\" folder contains a smaller version of the OpenBot body designed for 3D printers with limited build volumes. This slim body can be printed on a 220mmx220mm build plate by rotating the parts 45 degrees. 
It includes two main parts: the bottom (`slim_body_bottom`) and top (`slim_body_top`). Additionally, a `slim_body_top_rim` part is available for printers with slightly more space (223mmx223mm), providing an easier-to-remove top. To ensure proper fit, adjust printer settings to maximize print area by disabling \"Build Plate Adhesion Type,\" prime blob, and the second extruder if applicable.", + "https://github.com/ob-f/OpenBot/blob/master/body/diy/pcb/README.md": "The custom printed circuit board (PCB) serves as a carrier for the Arduino Nano, integrating modern motor drivers, a voltage divider circuit, and resistors for LEDs. The Arduino connects via pin header, while sensors and LEDs are attached through Dupont cables.\n\nThe latest PCB version (version 2) features improvements such as moving the right speed sensor to pin D3 for interrupt functionality and adding a Power LED for the main battery. It also uses updated components for better availability and precision.\n\nTo acquire the PCB, download the Gerber files, order the board from a vendor, and purchase the components from a supplier like LCSC. You can either assemble the PCB yourself or use a vendor's assembly service. Vendors such as JLCPCB offer a TurnKey option that handles PCB manufacturing, component sourcing, and assembly. When requesting a quote from PCBWay, select the assembly service and upload the BOM and Centroid File. Your quote will be updated within a few days, allowing you to proceed with payment and production.", + "https://github.com/ob-f/OpenBot/blob/master/body/diy/README.md": "OpenBot DIY provides instructions for building a low-cost, hobby-grade wheeled robot using 3D-printed parts and readily available electronics.\n\nFor 3D printing, several robot body options are provided, including a standard body, a slim body for smaller printers, a glue-able body for even smaller printers, and a blocky body with customizable features. 
Additionally, a phone mount is required for attaching a smartphone to the robot.\n\nIf 3D printing is not feasible, alternative chassis kits and phone mounts are suggested. Users can also build their own chassis and mount using materials like wood or cardboard.\n\nAssembly options include a DIY approach using an L298N motor driver, recommended for experienced hobbyists, and a custom PCB for easier assembly and a cleaner build.\n\nComponents required for the robot include motors, wheels, batteries, motor driver, Arduino Nano controller, LEDs, sensors, and a mobile phone with OpenBot app for control. Links to purchase components in Germany, the United States, and AliExpress are provided for convenience. **Required Components:**\n\n* Arduino Nano\n* 4x TT Motors with Tires\n* 3x 18650 Batteries\n* 1x 18650 Battery Holder\n* 1x USB OTG Cable\n* 1x Spring or Rubber Band\n* 16x M3x25 Screws\n* 16x M3 Nuts\n* 6x M3x5 Screws\n* Dupont Cables\n\n**Optional Components:**\n\n* 2x Speed Sensors\n* 1x Ultrasonic Sensor\n* 1x On/Off Switch\n* 2x Orange LEDs (5mm)\n* 1x OLED Display\n\n**DIY Components (Option 1):**\n\n* 1x L298N Motor Driver\n* (Optional) Resistors (2x 150\u03a9 for LEDs, 20 k\u03a9 and 10 k\u03a9 for Voltage Divider)\n\n**PCB Components (Option 2):**\n\n* 1x Custom PCB\n* 5x Micro JST PH 2.0 Cables\n\n**Build Instructions (Option 1 - DIY):**\n\n1. Solder wires to motors and add encoder disks (if using speed sensors).\n2. Connect motors to L298N board and mount motors with screws and nuts.\n3. Mount L298N with screws.\n4. Connect wires and components as per the Wiring Diagram. 
**Option 1: DIY**\n\n* Mount the wheels.\n* Attach the motors.\n* Install the battery case.\n* Optionally install the ultrasonic sensor, LEDs, on/off switch, OLED display, and voltage divider.\n* Connect the wiring and peripherals to the Arduino Nano.\n\n**Option 2: Custom PCB**\n\n* Solder wires and encoder disks to the motors.\n* Mount the motors and PCB.\n* Connect the motors to the PCB.\n* Follow steps 5-12 from the DIY option.\n* Connect the ultrasonic sensor to the designated header on the PCB. This guide provides instructions for assembling the Robomaster EP Sonar Sensor:\n\n1. Install the distance sensor module on the PCB.\n2. Solder the headers to the PCB.\n3. Install the power module on the PCB.\n4. Solder the headers to the power module.\n5. Connect the left and right indicators to the PCB.\n6. Connect the left and right speed sensors to the PCB.\n7. Optionally connect the OLED display to the PCB.\n8. Connect the power cables to the PCB.\n9. Follow additional assembly steps from the DIY option.\n10. Flash the Arduino Firmware.", + "https://github.com/ob-f/OpenBot/blob/master/body/lite/README.md": "ITE's eSpace has developed an educational curriculum for AI and robotics utilizing the OpenBot software stack. The curriculum consists of step-by-step YouTube videos guiding students through building the OpenBot Lite (either with micro:bit or Arduino), installing software, and implementing person following and autonomous driving capabilities.\n\nOpenBot Lite, a simplified version of the OpenBot DIY, enables economical and scalable AI education. It supports micro:bit or Arduino Nano, providing an accessible platform for learning.\n\nThe curriculum, tested by educators in South Korea and Germany, has successfully introduced over 100 students to AI and robotics. 
It features a 2.4m x 1.8m playfield where the robot learns to navigate autonomously, avoiding obstacles, making it suitable for home-based learning.\n\nStep-by-step video guides are available on YouTube in the following playlists:\n\n* Arduino Version: Assembly, Person Following, Policy Learning\n* micro:bit Version: Assembly, Person Following, Code Explanation, Policy Learning\n\nTo express appreciation for the educational materials, users are encouraged to like videos and subscribe to the eSpace Channel. Sharing success stories and OpenBot videos on social media and Slack is also welcomed.", + "https://github.com/ob-f/OpenBot/blob/master/body/mtv/pcb/README.md": "The MTV control architecture consists of a smartphone running Intel OpenBot, transmitting commands to an ESP32 microcontroller. The ESP32 handles pwm generation and sensor communication.\n\nThe MTV's locomotion system utilizes six 12V DC motors with planetary gearboxes and magnetic encoders. Off-the-shelf motor drivers are used for modularity. The component architecture includes an ESP32 MCU, motor drivers, and connectors.\n\nTo minimize size, the PCB design is split into Main PCB and Power distribution PCB. The Main PCB houses primary components while the Power distribution PCB converts voltage for different needs. Rechargeable 11.1V LiPo battery is integrated with a voltage display for battery level monitoring.\n\nCustom 2-layer PCBs are designed for power generation and control with modularity and easy plug-and-play capability. Power lines and signal lines are separated to reduce interference. Additional power ports and pin headers are included for future expansion. Prototyped PCBs with partially wired components, ESP32, and motor drivers are provided for illustration.", + "https://github.com/ob-f/OpenBot/blob/master/body/mtv/README.md": "OpenBot's Multi-Terrain Vehicle (MTV) is an off-road alternative to the original OpenBot. 
Designed in collaboration with Ivy Tech LTD, the MTV features inexpensive and readily printable parts. Inspired by lunar and Martian exploration vehicles, it employs a Rocker-Bogie configuration with six actuated wheels, eliminating radial rotation. Unlike most rovers, the MTV operates similarly to a tank, making manual control intuitive.\n\nTo assemble the MTV, print the provided 3D parts. The motor assembly consists of Motor Enclosure Top, Motor Enclosure Bottom, and Motor Bracket. Joints include 90-degree Joint, 100-degree Joint, 100-degree Joint Mirror, and End Joint. The legs comprise Front Leg, Mid Leg, Back Leg, and Top Back Leg. The table provides detailed information on each part, including quantity, material, print duration, and cost. This documentation provides a comprehensive inventory of 3D CAD models for an MTV project. Each model has an assigned part number, a brief description, and links to STL and STEP file formats. Additionally, key details such as quantity, images, weight, printing time, and estimated cost are listed for each part.\n\nThe project components include various structural elements, bearing covers, phone mount platforms, buffers, compartments, and battery mounts. 
The table details the following parts:\n\n- A1: Front Leg (2 pcs)\n- A2: Rear Frame (1 pc)\n- A3: Side Frame (2 pcs)\n- A4: Phone Mount (1 pc)\n- A5: Steering Hub (1 pc)\n- A6: Steering Wheel (1 pc)\n- A7: Steering Rod (1 pc)\n- A8: Motor Gear (1 pc)\n- A9: Motor Mount (2 pcs)\n- A10: Rear Leg (2 pcs)\n- A11: Bearing Cover (4 pcs)\n- A12: Phone Mount Platform (1 pc)\n- A13-16: Front Buffer (Name Front, Name Back, Buffer Left, and Buffer Right)\n- A17-20: Compartment (Compartment Rear, Compartment Front, Roof Front, and Roof Rear)\n- A21ab: Battery Mount P1 and P2 The provided README contains information about the MTV robot project, including part descriptions, electro-mechanical assembly, and bill of materials.\n\n**Part Descriptions:**\n\n- The table details the components required for the robot body, including STL and STEP CAD files, dimensions, material weight, and print time estimates. The print area required is at least 240mm x 150mm. For optimal printing results on an Ultimaker S5, recommended settings include a 0.2mm layer height, 1.5mm wall thickness, and 20% infill density.\n\n**Electro-Mechanical Assembly:**\n\n- A comprehensive bill of materials is provided, listing readily available hobby electronics and their respective suppliers. Specific components include JGB37-520 DC motors with encoders, 2.8\" Talon tires, 7-core cable, and D-Line cable ducts. Pricing and sourcing options are specified for both EU and US locations, including Amazon and AliExpress. This table lists various products with their descriptions, prices, and availability. 
The products include:\n\n- **O1:** 70mm RC Buggy Shock Absorber (4 pcs) | Price: $12.99 | AE: $12.99\n- **O2:** 70mm RC Monster Truck Shock Absorber (4 pcs) | Price: $12.99 | AE: $12.99\n- **O3:** 100mm RC Buggy Shock Absorber (2 pcs) | Price: $12.99 | AE: $12.99\n- **O4:** Multi-meter Test Leads - 1+ meter lengths | Price: $12.00 | US: $12.00, EU: \u20ac9.99\n- **O5:** PG7 Cable Gland 3~6.5mm (50 pcs) | Price: $8.99 | US: $8.99, EU: \u20ac9.99\n- **O6:** MR126ZZ Ball Bearings 6x12x4mm (4 pcs) | Price: $5.52 | AE: $5.52\n- **O7:** INJORA 90mm RC Car Spring Shock Absorber (2pcs) | Price: $8.30 | AE: $8.30\n- **O8:** AXSPEED RC Car LED 4.2v-6v White 17mm (2 pcs) | Price: $7.43 | AE: $7.43\n- **O9:** AXSPEED RC Car LED 4.2v-6v Red 17mm (2 pcs) | Price: $7.43 | AE: $7.43\n- **O10:** Vibration Isolators M3 x 8mm Studs (4 pcs) | Price: $8.09 | US: $8.09, EU: \u20ac9.49\n- **O11:** Zeadio Universal Smartphone holder | Price: N/A | US: N/A, EU: N/A This list provides component pricing and availability information for various electronic components, including universal smartphone holders, voltmeters, mini rocker switches, micro USB panel mount cables, custom PCBs, motor drivers, DC converters, and LiPo batteries. Each component is labeled with its own unique identifier (O12-O18) and detailed with an image, pricing, quantity, and links to Amazon marketplaces in both the US and EU. The pricing information includes the cost per unit in both US dollars and Euro. The quantities listed represent the number of units included in each purchase, and the total price is calculated as the cost per unit multiplied by the quantity. This document contains information on various electronics and accessories available for purchase from online retailers such as Amazon in both the US and EU. 
The table below provides details on each item:\n\n| Item Code | Item Description | Image | Link | US Price | US Quantity | US Cost | EU Price | EU Quantity | EU Cost |\n|---|---|---|---|---|---|---|---|---|---|\n| O1 | LY CB12 Tri-Band 4G & 5G Antenna (2 pcs) | Image | US | $41.99 | 1 | $41.99 | \u20ac34.74 | 0.33 | \u20ac11.58 |\n| O2 | TP-Link Tapo P100 Mini Smart Plug (2 pcs) | Image | US | $17.99 | 1 | $17.99 | \u20ac21.38 | 0.33 | \u20ac7.13 |\n| O3 | DEWALT 20V MAX XR Brushless Impact Wrench (1 pc) | Image | US | $18.99 | 1 | $18.99 | \u20ac15.28 | 0.33 | \u20ac5.09 |\n| O4 | 4500mAh 11.1v 55c Lipo Battery (1 pc) | Image | US | $18.99 | 1 | $18.99 | \u20ac35.46 | 0.33 | \u20ac11.82 |\n| O5 | AESON Mini HDMI to HDMI Cable (3 pcs) | Image | US | $12.99 | 1 | $12.99 | \u20ac12.86 | 0.33 | \u20ac4.29 |\n| O6 | 7.4V 2500mAh Lipo Battery (2 pcs) | Image | US | $22.99 | 1 | $22.99 | \u20ac21.93 | 0.33 | \u20ac7.31 |\n| O7 | Ricoh Theta SC2 360-degree Camera (1 pc) | Image | US | $229.99 | 1 | $229.99 | \u20ac277.29 | 0.33 | \u20ac92.43 |\n| O8 | Creality Ender 3 S1 Pro 3D Printer (1 pc) | Image | US | $299.00 | 1 | $299.00 | \u20ac501.12 | 0.33 | \u20ac167.04 |\n| O9 | Oculus Meta Quest 2 Advanced VR Headset (1 pc) | Image | US | $299.00 | 1 | $299.00 | \u20ac423.63 | 0.33 | \u20ac141.21 |\n| O10 | Apple AirPods Pro 2 Wireless Earbuds (1 pc) | Image | US | $169.00 | 1 | $169.00 | \u20ac211.91 | 0.33 | \u20ac70.64 |\n| O11 | OnePlus Buds Pro Wireless Earphones (1 pc) | Image | US | $79.99 | 1 | $79.99 | \u20ac79.95 | 0.33 | \u20ac26.65 |\n| O12 | Anker Soundcore Bluetooth Speaker (1 pc) | Image | US | $24.99 | 1 | $24.99 | \u20ac29.28 | 0.33 | \u20ac9.76 |\n| O13 | SanDisk 1TB Extreme Portable SSD (1 pc) | Image | US | $69.99 | 1 | $69.99 | \u20ac92.53 | 0.33 | \u20ac30.84 |\n| O15 | Samsung Galaxy S22 Ultra 5G Smartphone (1 pc) | Image | US | $799.99 | 1 | $799.99 | \u20ac954.50 | 0.33 | \u20ac318.17 |\n| O16 | Meta Quest 2 VR Gaming Headset (1 pc) | Image | US | 
$299.00 | 1 | $299.00 | \u20ac369.69 | 0.33 | \u20ac123.23 |\n| O17 | OnePlus 10T 5G Smartphone (1 pc) | Image | US | $499.00 | 1 | $499.00 | \u20ac714.94 | 0.33 | \u20ac238.31 |\n| O18 | Samsung Galaxy Tab A7 Lite Tablet (1 pc) | Image | US | $159.99 | 1 | $159.99 | \u20ac199.43 | 0.33 | \u20ac66.48 |\n| O19 | AITRIP ESP32-DevKitC Development Board (3 pcs) | Image | US | $19.99 | 1 | $6.66 | \u20ac21.00 | 0.33 | \u20ac7.00 |\n| O20 | 5.08mm PCB Terminal Block (11 x 2 Pin + 11 x 4 Pin) | Image | US | $7.99 | 1 | $7.99 | \u20ac11.99 | 1 | \u20ac11.99 |\n| O21 | M6 x 1m (1000mm) Zinc Plated Threaded Rod/Bar | Image | US | $22.73 | 1 | $22.73 | \u20ac4.31 | 1 | \u20ac4.31 |\n| O23 | M4 x 50mm screws (30 pcs) | Image | US | $11.39 | 1 | $11.39 | \u20ac9.83 | 1 | \u20ac9.83 |\n| O24 | M4 x 40mm screws (30 pcs) | Image | US | $10.56 | 1 | $10.56 | \u20ac9.13 | 1 | \u20ac9.13 |\n| O25 | M3 screws and nuts sets (440 pcs) | Image | US | $21.99 | 1 | $21.99 | \u20ac32.01 | 0.33 | \u20ac10.67 | This document provides an overview of the materials, components, and associated costs involved in assembling the MTV.\n\n**Materials and Components:**\n\nThe externally sourced components for the MTV include:\n\n| Item | Description | Image | Purchase Link (US | Purchase Link (EU) | Price (US | Price (EU) | Quantity | Total Cost |\n|---|---|---|---|---|---|---|---|---|\n| O25 | M3 x 8mm screws (440 pcs) | [Image] | [US Link](https://www.amazon.com/XunLiu-screws-Stainless-Fastener-furniture/dp/B0779YMMGH/ref=sr_1_10?crid=2V3O2C3G9K8SI&keywords=m3+screws+and+nuts+sets+%28440+pcs%29&qid=1650639424&sprefix=m3+screws+and+nuts+sets+440+pcs+%2Caps%2C257&sr=8-9) | [EU Link](https://www.amazon.de/-/en/pieces-screws-stainless-hexagon-socket/dp/B093GNHWKR/ref=sr_1_3?crid=36NK6MT1K8LSC&keywords=satz+m3+innensechskant&qid=1644166735&s=diy&sprefix=set+m3+hex+socket%2Cdiy%2C87&sr=1-3) | $6.49 | \u20ac11.99 | 1 | $6.49 | \u20ac11.99 |\n| O26 | M4 x 120mm screws (15 pcs) | [Image] | [US 
Link](https://www.amazon.com/XunLiu-Socket-Screws-Wrench-Knurled/dp/B07CHJ7ZPP/ref=sr_1_38?crid=1U3GJUWD14XQB&keywords=screw%2BM4%2Bx%2B120mm&qid=1650753655&sprefix=screw%2Bm4%2Bx%2B120mm%2Caps%2C167&sr=8-38&th=1) | [EU Link](https://www.amazon.de/-/en/sourcing-Phillips-Furniture-Hanging-External/dp/B08JYCP7TD/ref=sr_1_64?crid=25999O4GLCN83&keywords=m4+x+120+mm&qid=1644165946&sprefix=m4+x+120mm+socket%2Caps%2C109&sr=8-64) | $15.29 | \u20ac15.64 | 1 | $15.29 | \u20ac15.64 |\n| O27 | M6 x 100mm screws (2 pcs) | [Image] | [US Link](https://www.amazon.com/MroMax-Machine-Stainless-Phillips-Cabinet/dp/B07YFMN8FP/ref=sr_1_5?crid=3GFYNNIBEO5UA&keywords=M6+x+100mm+screws&qid=1650639588&sprefix=m6+x+100mm+screws+2+pcs+%2Caps%2C161&sr=8-5) | [EU Link](https://www.amazon.de/-/en/AG-BOX%C2%AE-Cylinder-Screws-Stainless-Steel/dp/B09N3DNDZK/ref=sr_1_5?crid=1NROTQHUR7F2K&keywords=m6x100+mm&qid=1644171138&sprefix=m6+x+100mm%2Caps%2C90&sr=8-5) | $3.84 | \u20ac4.18 | 1 | $3.84 | \u20ac4.18 |\n| O28 | Assorted Nuts (Full and Self Locking) and Washers M3, M4 & M5 (45 pcs) | [Image] | [US Link](https://www.amazon.com/Assortment-AETTL-Stainless-Assorted-Wrenches/dp/B098ND1GW8/ref=sr_1_2?crid=3AFZPJ5SIBJD9&keywords=Assorted+Nuts+%28Full+and+Self+Locking%29+and+Washers+M3%2C+M4+%26+M5+%2845+pcs%29&qid=1650639662&sprefix=assorted+nuts+full+and+self+locking+and+washers+m3%2C+m4+%26+m5+45+pcs+%2Caps%2C226&sr=8-2) | [EU Link](https://www.amazon.de/-/en/Assorted-Locking-Washers-Stainless-Steel/dp/B01CO9S1RI/ref=sr_1_98?crid=P8DDSVM9ZTG1&keywords=m3+m4+muttern+unterlegscheiben&qid=1644172060&sprefix=m3+m4+nuts+washers%2Caps%2C101&sr=8-98) | $25.64 | \u20ac23.01 | 1 | $25.64 | \u20ac23.01 |\n| O29 | M6 Nuts/Washers Set (40 pcs) | [Image] | [US 
Link](https://www.amazon.com/Stainless-Washer-Machine-Thread-Assortment/dp/B09HNJBX1G/ref=sr_1_3?crid=1N4R1D32PS8W6&keywords=40-Piece+Nuts%2FWashers+Set+for+M6&qid=1650639826&sprefix=40-piece+nuts%2Fwashers+set+for+m6+threaded+rods+a2+stainless+steel%2Caps%2C227&sr=8-3) | [EU Link](https://www.amazon.de/-/en/40-Piece-Washers-Threaded-Stainless-Steel/dp/B01G77C0DY/ref=sr_1_31?crid=EUR5CW3K5BLD&keywords=m6+muttern+und+unterlegscheiben&qid=1644172515&refinements=p_36%3A118557031&rnid=118555031&s=diy&sprefix=m6+nuts+and+washers%2Caps%2C87&sr=1-31) | $13.99 | \u20ac8.99 | 1 | $13.99 | \u20ac8.99 |\n| O30 | Stainless Steel Flat and Spring Washers Assortment | [Image] | [US Link](https://www.amazon.com/Split-Washer-Assortment-Stainless-Silver/dp/B08QN3ZR23/ref=sr_1_35?keywords=Stainless+Steel+Flat+and+Spring+Washers+Assortment+Kit&qid=1650753391&sr=8-35) | [EU Link](https://www.amazon.de/-/en/300-pieces-Stainless-Washers-Assortment-M2-M3-M4-M5-M6/dp/B07CQX6NPP/ref=sr_1_15?keywords=m4+federscheibe&qid=1650753193&sr=8-15) | $12.49 | \u20ac10.38 | 1 | $12.49 | \u20ac10.38 |\n\n**Total Cost of Externally Sourced Components:** Approximately **500\u20ac**\n\n**Assembly Cost:** Estimated at **100\u20ac**\n\n**Total Estimated Cost:** Approximately **600\u20ac** The Mars Terrain Vehicle (MTV) assembly process comprises seven steps:\n\n**1. Motor Modules Assembly:**\n\nEach module consists of a motor, motor enclosure, and bracket. Connect the motor's control cable, screw it to the enclosure, and secure it with glue. Slide the bracket over the enclosure and glue it in place. Add cable glands to the bottom enclosure and run the wires through. Assemble the top and bottom enclosures using bolts and nuts.\n\n**2. Legs Assembly:**\n\nThe MTV has two legs, each with joints and leg segments. Assemble the joints and attach the legs using bolts and washers. 
Secure the middle and front legs to the 90-degree joint, and the rear leg to the 100-degree joint (or its mirror for the right leg).\n\n**3. Chassis Assembly:**\n\nThe chassis is assembled using 3D-printed parts and bolts. Secure the motor modules to the chassis with bolts and nuts, and mount the legs to the chassis using bolts and washers.\n\n**4. Main Compartment Assembly:**\n\nThe main compartment is assembled within the chassis. Install the controller and power unit, and run wires between them and the motor modules and legs.\n\n**5. Vehicle Assembly:**\n\nAssemble the main compartment to the chassis, and mount the tires and wheels onto the motor modules.\n\n**6. Power Unit, Control Unit, and Component Connection:**\n\nConnect the power unit, control unit, and various components according to the instructions.\n\n**7. Testing, Programming, and Use:**\n\nConduct system tests, program the vehicle, and prepare it for operation. **Leg Assembly:**\n- Attach leg assemblies to chassis using bolts, washers, and nuts.\n- Attach tires to wheel hubs and secure with nuts.\n\n**Chassis Assembly:**\n- Attach motor brackets to leg ends using bolts, washers, and nuts.\n- Attach motors to brackets and connect cables.\n- Install cable duct.\n\n**Main Compartment Assembly:**\n- Join rear and front compartments using screws, washers, and nuts.\n- Drill holes for bearings and insert into bearing cover.\n- Secure bearing assembly to compartment using bolts, washers, and nuts.\n- Install cable glands and attach buffers.\n- Attach battery mount and insert threaded bar.\n- Add nuts as spacers and balance the bar to prevent side wall tension.\n\n**Vehicle Assembly:**\n- Connect chassis to threaded bar on either side of the compartment using nuts.\n- Attach shock absorbers between chassis and mid leg.\n- Replace factory springs with medium compression springs. 
**Part I: Hardware Assembly**\n\n* Attach the piston to the robot compartment using the spacer, bolt, and lock nut provided.\n* Fix the rod end using M3 x 40 bolts and nyloc.\n* Wire the leg assembly to the main body and manage cables using the cable management trunking.\n* Attach the phone mount to the roof front, securing the universal smart phone holder and vibration isolators with lock nuts and washers.\n* Fix the voltage display, power control switches, and Micro USB cable on the roof rear.\n* Pull out the head and tail lights, then attach the roof front and roof rear to the compartment assembly.\n* Insert and secure the lights into their respective slots, sealing them with light endcaps.\n* Slide in the customized Name Front and Name Back plates.\n\n**Part II: Electronics Assembly**\n\n* Connect the battery to the Power distribution PCB using splicing connectors.\n* Connect the voltage display to the Power distribution PCB.\n* Connect the lights to the MTV's main PCB.\n* Assemble the Power distribution PCB into the main housing and connect the motor cables.\n* Attach the main PCB to the MTV and connect the lighting and voltage connections to the Power distribution PCB. 
**Mounting Holes:**\n\n* M3-type mounting holes (Mount_1 to Mount_4) connect the Main PCB to the MTV using screws and nuts.\n* M3-type mounting holes (MD_Mount_1 to MD_Mount_4) connect the motor drivers to the Main PCB in the same manner.\n\n**Roof Cable Assembly:**\n\n* Switches and displays in the MTV roof are connected to the Main PCB using splicing connectors.\n* The ESP 32's Micro USB port is extended using a 90-degree Micro USB extension cable, allowing users to connect USB cables without removing the roof.\n\n**Safety Disclaimer:**\n\n* Assembly requires intermediate to advanced expertise in mechanical and electronic devices.\n* Exercise caution due to sharp edges on 3D printed components.\n* Maintain a safe distance during operation.\n* Be aware of potential finger entrapment.\n* Avoid direct sunlight, wet environments, fire, and excessive height.\n* Use designated tools and avoid excessive force.\n* Prevent drops and handle the Li-Po battery safely.", + "https://github.com/ob-f/OpenBot/blob/master/body/rc_truck/README.md": "OpenBot's RC-Truck body is designed for 1:16 scale RC-toy trucks/buggies. It consists of a chassis with two main components: a compatible RC-toy truck and custom 3D-printed parts. The required printed parts include the main frame, side covers, phone mount, and optional parts for compactness and aesthetics. The main frame requires a minimum build plate size of 260mmx220mm, while other parts require 220mmx60mm. Users can modify the main frame's STEP file using Autodesk Fusion 360 to fit smaller build volumes. Compatible RC-toy trucks can be purchased from vendors like Amazon, Ebay, or AliExpress, ensuring 1:16 scale compatibility. OpenBot RC-Truck body provides a customizable and extensible chassis for your robotic build. **3D Printing and Assembly:**\n\nTo 3D print the OpenBot RC-Truck, specific settings are recommended for optimal results. 
Using a support structure can enhance the print, although it may require additional effort to remove later. Additionally, using contrasting colors for different parts can improve the aesthetics.\n\nFor assembly, it is advisable to use the custom PCB provided by OpenBot. This option simplifies the build process and allows for easy switching between different OpenBot bodies. The PCB can be connected to the required components, including an Arduino Nano, USB OTG cable, and RC-toy truck. Optional components such as an ultrasonic sensor, LEDs, and resistors can also be integrated.\n\n**Bill of Materials:**\n\nThe OpenBot RC-Truck requires various readily available hobby electronics, including an RC-toy truck, Arduino Nano, custom PCB, USB OTG cable, screws, nuts, and wires. Optional components include an ultrasonic sensor, switches, and LED lights.\n\n**Build Instructions:**\n\nThe build instructions start by disassembling the RC-toy truck, exposing the motor connectors and UBEC output. The motor connectors are plugged into the speed controller on the PCB, and the UBEC output powers the rest of the components. The main frame is then mounted on the truck body, and the LED lights are installed for illumination. This guide provides instructions for assembling the main frame of an RC truck. It explains the installation of the ON/OFF switch, ultrasonic sensor, orange LEDs for indicator signals, front LED lamps, and rear Red LEDs. Dimensions for the main frame are provided, and modifications may be necessary depending on the truck model used. Autodesk Fusion 360 is recommended for CAD modifications. The installation steps include soldering the switch in-between the positive wire, gluing the ultrasonic sensor, connecting LEDs in parallel, running cables through the back opening of the main frame, forming a unified ground loop for negative terminals, and connecting LED lamps to the UBEC output. 
The installation of the front light spacers and the construction of the voltage divider for rear lights are also briefly mentioned. **Building an Arduino-Based RC Truck with an Ultrasonic Sensor**\n\nThis guide provides step-by-step instructions for assembling an RC truck with an Arduino and an ultrasonic sensor. Instructions include:\n\n**1. Wiring the Front LED Lamps:**\n- Connect red/black wires of front LEDs to their respective terminals on the PCB.\n\n**2. Wiring the Ultrasonic Sensor:**\n- Connect signal and ground wires of the sensor to the corresponding ports on the PCB.\n\n**3. Installing a Switch (Optional):**\n- Add a switch to control power supply or LED functionality.\n\n**4. Installing Voltage Divider (Optional):**\n- Install a voltage divider for Red LEDs to ensure safe operation (2-3V).\n\n**5. Wiring Rear LEDs (Optional):**\n- Connect Red LED wires to the voltage divider and UBEC output.\n\n**6. Wiring the Speed Controller UBEC:**\n- Connect UBEC output (+6V) to the Arduino Nano's Vin pin and GND to the GND pin.\n\n**7. Securing the Wiring:**\n- Insulate and secure all wires using shrink tube or tape.\n\n**8. Mounting the Phone Mount Bottom:**\n- Attach the phone mount bottom to the main frame using screws.\n\n**9. Installing the Phone Mount Top:**\n- Insert the phone mount top and secure it with a spring or rubber band.\n\n**10. Installing the Side Covers:**\n- Insert the side covers into the main frame's slots.\n\n**11. Mounting the Main Frame to the RC Truck Body:**\n- Align the main frame with the RC truck body and secure it with mounting pins.\n\n**12. Mounting the PCB:**\n- Mount the PCB on the main frame using screws and spacers.\n\n**13. Connecting the Ultrasonic Sensor and Indicator LEDs:**\n- Connect the ultrasonic sensor and indicator LED cables to the PCB's respective connectors.\n\n**14. 
Connecting Power and Other Devices:**\n- Connect the UBEC output to Arduino Nano (optional), steering servo, front LEDs, and rear Red LEDs.\n- Connect the steering servo ground to Arduino GND. To complete the assembly:\n\n1. Connect the PWM cable from the throttle servo to pin A0 and the PWM cable from the steering servo to pin A1 on the Arduino Nano.\n2. If a LED ground loop was created, connect it to a GND pin on the Arduino. Otherwise, ensure all components share a common ground.\n3. Secure the battery pack in the front using velcro or tape for easy recharging.\n4. Install the front and back electronic covers. Route the USB OTG cable through the gap in the rear cover for connecting to a smartphone.", + "https://github.com/ob-f/OpenBot/blob/master/body/rtr/README.md": "OpenBot's Ready-To-Run (RTR) vehicles are pre-built and designed for users who lack the resources or time to construct their own robots. These vehicles feature integrated electronics, firmware support, and thorough testing.\n\nTwo RTR models are available: RTR_TT and RTR_520. Both models are housed in splash-proof ABS shells but have distinct purposes. RTR_TT is primarily intended for indoor use, while RTR_520 is equipped with more advanced features like a high-performance processor, enhanced motors, and all-terrain wheels for both indoor and outdoor applications.\n\nFor those interested in creating their own RTR OpenBot, they can purchase the 3D-printable files for the chassis and manufacture the necessary PCBs. The process involves 3D printing the chassis components, producing the PCBs, and acquiring the required motors and phone mount.\n\nThe RTR OpenBot can be ordered online, and the build process includes 3D printing the chassis, creating the PCBs, and assembling the components. Depending on the preferred motor type (TT or 520), specific PCBs are required. The assembly process involves connecting the PCBs and installing the motors, sensors, and other electronic components. 
This repository provides Gerber files, BOMs, and centroid files for various OpenBot PCB designs:\n\n- Speed Sensor PCB with Arduino interface\n- Base Board C with Arduino and ESP32 variants\n- Base Board B with Arduino and ESP32 variants\n- Bare Base Board A with Arduino and ESP32 variants\n- Motor Driver DRV8870 PCB with Arduino and ESP32 variants\n\nTo proceed, the Arduino firmware should be flashed as per the instructions provided in the \"Arduino Firmware\" README file.", + "https://github.com/ob-f/OpenBot/blob/master/controller/flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md": "This project provides customizable launch screen assets for Flutter projects. You can replace the existing image files in this directory to modify the appearance of your launch screen.\n\nAlternatively, you can open your Flutter project's Xcode project and select \"Runner/Assets.xcassets\" in the Project Navigator. Drag and drop your desired images into this location to update the launch screen assets.", + "https://github.com/ob-f/OpenBot/blob/master/controller/flutter/README.md": "**Summary:**\n\nThe Flutter Controller App serves as a remote control for OpenBot vehicles, similar to a Bluetooth controller. It allows users to drive the robot via on-screen controls or tilt gestures, and provides live video/audio streaming.\n\n**Getting Started:**\n\nTo use the app, install Flutter on your system and navigate to the OpenBot/controller/flutter directory. Use terminal commands to install dependencies (flutter pub get) and run the project (flutter run). Alternatively, you can set up Flutter in your editor and run the project using the \"run\" button.\n\n**Connection:**\n\nThe app attempts to connect to the robot automatically. 
To connect manually, set the robot's control mode to \"Phone\" in the robot's app.\n\n**Operation:**\n\n* **On-screen controls:** Use sliders for Dual Drive mode (turn left/right by moving sliders up and down, move forward/backward by moving sliders above/below center). Indicators, camera switching, audio muting, and video mirroring are available.\n* **Tilt to drive:** Use accelerometer to control robot. Accelerate by pressing \"accelerator,\" brake by pressing \"brake.\" Release \"accelerator\" to slow down and stop, or press \"brake\" to stop immediately. The OpenBot app allows you to control an OpenBot robot with your phone. The app features a two-button interface: one button for forward movement and the other for reverse movement. When you hold down the forward button, the robot will accelerate forward until it reaches the maximum forward speed in one second. When you release the button, the robot will coast to a stop. The reverse button works in a similar way, but the robot will move in reverse. You can steer the robot by tilting your phone left or right. The faster you tilt the phone, the faster the robot will turn. If you hold down the brake button for one second, the robot will stop moving. When you let go of the brake, the robot will resume its previous speed and direction.", + "https://github.com/ob-f/OpenBot/blob/master/firmware/README.md": "This Arduino firmware, compatible with the Arduino Nano and ESP32 development kit, acts as a bridge between the OpenBot robot and a smartphone. The microcontroller unit (MCU) handles low-level vehicle control, receiving and converting commands from the smartphone via serial connection. It also tracks wheel rotations, estimates battery voltage, and detects obstacles using optional sensors.\n\nTo set up the firmware, configure hardware settings and enable desired features. 
Enable the voltage divider, indicator LEDs, speed sensors, ultrasonic sensor, bumper sensor, OLED display, or various LEDs as needed.\n\nInstall dependencies such as PinChangeInterrupt for speed and ultrasonic sensors, and Adafruit libraries for the OLED display. To install libraries, navigate to the Arduino Library Manager, search for the library, and click install.\n\nIf using a Chinese clone Nano, download WCH340 drivers from the chip manufacturer. **ESP32 Development Kit Setup**\n\nTo install the ESP32 board in your Arduino IDE:\n- Add the ESP32 Board Manager URL to your preferences.\n- Install the \"ESP32 by Espressif Systems\" board from the Boards Manager.\n- Set the upload speed to 115200.\n- Select the \"ESP32 Dev Module\" board in the Tools > Board menu.\n\n**Upload Settings**\n\n**Arduino Nano:**\n- Board: Arduino Nano\n- Processor: ATmega328P (Old Bootloader)\n- Port: Select the USB port\n\n**ESP32:**\n- Board: ESP32 Dev Module\n- Port: Select the USB port\n\n**Firmware Upload:**\n\nUpload the firmware using Sketch > Upload or the upload button.\n\n**Testing**\n\n- Ensure wheels are disconnected and Arduino is connected.\n- Open the Serial Monitor (Tools > Serial Monitor).\n\n**Message Commands:**\n\n- `c,`: Control motor speed (-255 to 255).\n- `i,`: Control indicator LEDs (0 or 1).\n- `l,`: Control LED brightness (0 to 255).\n- `s`: Set sonar measurement interval (default: 1000ms).\n- `w`: Set wheel odometry measurement interval (default: 1000ms). 
**Commands for OpenBot Control and Monitoring:**\n\n* **Voltage Monitoring:**\n * `v`: Set voltage monitoring interval and receive voltage, min driving voltage, min battery voltage, and max battery voltage.\n\n* **Heartbeat and Bumper Trigger:**\n * `h`: Set heartbeat interval to stop robot if heartbeat is lost.\n * `b`: Set bumper trigger reset time.\n\n* **LED Control:**\n * `n,`: Control status LEDs (blue, green, yellow).\n\n* **Robot Information:**\n * `f`: Request robot type and features.\n\n**Messages from OpenBot:**\n\n* **Voltage:** `v` followed by battery voltage.\n* **Speed:** `w` followed by speed sensor readings in rpm.\n* **Sonar Distance:** `s` followed by estimated free space in front of ultrasonic sensor.\n* **Collision:** `b` followed by sensor code indicating collision (left/right front, center front, left/right back).\n\n**Test Procedure:**\n\n1. Check battery voltage with a multimeter and adjust voltage factor if necessary.\n2. Test ultrasonic sensor for sensitivity to vibrations by placing it in open space and sending command `c128,128`.\n3. Verify speed sensor readings by sending different control commands.\n4. Stop motors by sending command `c0`. **Car Control and Testing**\n\nTo control the car without a smartphone, set `NO_PHONE_MODE` to `1`. The car will drive at 75% PWM and slow down when obstacles are detected. When it approaches within `TURN_THRESHOLD` (default: 50cm) of an obstacle, it will turn randomly and activate the LED on that side. If available space in front of the car falls below `TURN_THRESHOLD`, it will reverse slowly with both LEDs illuminated.\n\nTo test without the tires, connect the Arduino to a computer and monitor the serial output. This provides information on battery voltage, motor RPM, and estimated free space in front of the car. 
Move an object in front of the ultrasonic sensor to observe changes in motor speed.\n\n**MCU Requirements (For Other Controllers)**\n\nFor MCUs other than Arduino, the following features are required:\n\n* USB-to-TTL Serial for smartphone communication\n* 4x PWM output for motor control\n* 1x analog pin for battery monitoring\n* 2x digital pins for speed sensors\n* 1x digital pin for ultrasonic sensor (optional)\n* 2x digital pins for indicator LEDs (optional)", + "https://github.com/ob-f/OpenBot/blob/master/ios/OpenBot/OpenBot/Authentication/README.md": "Firebase is a mobile and web app development platform that provides various services and tools for developers to efficiently build high-quality apps. It offers features like real-time databases, user authentication, hosting, cloud storage, and more, all integrated into one platform. Firebase simplifies backend infrastructure management, allowing developers to focus on user experience.\n\nIn this application, Firebase is utilized for Google Sign-In authentication, enabling users to access the OpenBot Playground projects on Google Drive conveniently and securely. However, if you plan to build the iOS app on your device, it's crucial to set up Firebase for the OpenBot Playground web application as well. This ensures proper functioning of Google Drive services.\n\nTo prepare for Firebase integration in your iOS OpenBot application, you'll need a Google account and Cocoapods, a dependency manager for Swift and Objective-C projects. Once these prerequisites are met, you can set up your Firebase project by creating a new project, adding an iOS app (registering it and downloading the GoogleService-Info.plist file), and configuring Firebase services. To enable Google Sign-In authentication, navigate to \"Authentication\" in the Firebase Console, click \"Get Started,\" and follow the subsequent steps. 
**Firebase iOS Setup**\n\nThis document provides a guide for setting up Google Sign-In and the Firebase SDK in an iOS project. To enable Google Sign-In authentication, open the Firebase console and click the toggle button.\n\n**iOS Project Setup**\n\nInstall the Firebase SDK using CocoaPods by creating a Podfile with the following contents:\n\n```\npod 'Firebase/Core'\npod 'Firebase/Storage'\npod 'Firebase/Auth'\npod 'GoogleSignIn'\npod 'GoogleSignInSwiftSupport'\npod 'GoogleAPIClientForREST/Drive'\n```\n\nAfter saving the Podfile, run `pod install` to install the SDKs.\n\n**Troubleshooting**\n\n**CocoaPods Installation Errors:**\n\n* Update CocoaPods version (`sudo gem install cocoapods`)\n* Delete Podfile.lock and run `pod install` again\n* Ensure Firebase SDK version in Podfile matches Firebase Console\n\n**Firebase Configuration Errors:**\n\n* Verify \"Set up the iOS Project\" steps are followed correctly\n* Check Firebase configuration file (GoogleService-Info.plist) is added to the project\n* Ensure Firebase SDK is imported and added to the project\n* Verify Firebase project settings in the Firebase Console, including bundle ID\n\n**Library Conflicts:**\n\n* Specify correct Firebase SDK version in Podfile\n* Update other libraries to the latest version\n* Resolve conflicting dependencies by removing or updating libraries", + "https://github.com/ob-f/OpenBot/blob/master/ios/OpenBot/README.md": "The Robot iOS Beta App offers basic robot control with live information (battery, speed, distance). It features a Bluetooth Low-Energy (BLE) wireless connection that displays compatible devices and pairs with OpenBot vehicles.\n\nThe Main Menu provides access to screens for Bluetooth connection, settings, and various functionalities. The Bluetooth connection screen lists compatible devices and establishes connections. 
The Free Roam screen allows for easy robot control with real-time updates.\n\nThe Control screen enables selection of control modes (Gamepad or Phone) and drive modes (Game, Joystick, Camera). The Gamepad mode utilizes a connected BT controller for control, while the Phone mode allows for control via another smartphone or Python script. Drive modes include forward, neutral, and reverse.\n\nThe app also includes a Projects tab for viewing saved projects and a Profile tab for accessing user information and signing out. It is recommended to operate in a safe environment and use at your own risk due to potential app instability and the risk of phone damage in collisions. **Controls:**\n\n* Use joysticks to maneuver the robot in one of two modes:\n * **Single**: Control the robot with one joystick.\n * **Dual**: Control the left and right sides of the robot independently.\n\n* The third button selects the speed mode (Slow, Normal, Fast).\n\n**Data Logging:**\n\n* Four logging modes:\n * **only_sensors**: Saves only sensor data.\n * **crop_img**: Saves sensor data and cropped images (default).\n * **preview_img**: Saves sensor data and full-size images.\n * **all_imgs**: Saves sensor data and both cropped and full-size images.\n\n**Camera:**\n\n* Preview and cropping resolutions are displayed.\n* Switch to toggle between rear and front cameras.\n\n**Model:**\n\n* Two pre-installed models:\n * **MobileNetV1-300**: Object detection for person following.\n * **CIL-Mobile**: Autonomous navigation (may require training).\n* Switch to enable/disable neural network controls.\n\n**Device:**\n\n* Select device for neural network execution (CPU, GPU, NNAPI).\n* Inference speed is displayed for active models.\n\n**Data Collection:**\n\n* UI for collecting datasets.\n* Adjustable camera preview and model resolutions. 
The iOS app provides a graphical user interface for controlling and monitoring a wheeled robot from a smartphone.\n\n**Data Collection:**\n* Data collection can be initiated and stopped via a Bluetooth controller.\n* The collected data can be saved in a .zip file or discarded.\n\n**Vehicle Status:**\n* Displays the battery voltage, left and right wheel speeds, and the available space in front of the car measured by sensors.\n\n**Sensors:**\n* Records readings from various sensors, including cameras, gyroscopes, accelerometers, and ultrasonic sensors.\n* Body sensor readings and control signals from connected controllers are also recorded.\n\n**Controller Mapping:**\n* Provides a way to check and configure button and joystick mappings for Bluetooth controllers.\n\n**Robot Info:**\n* Displays the robot type and readings from key sensors.\n* Allows users to send basic motor commands and control LEDs.\n\n**Autopilot:**\n* Enables running of trained autopilot models on the robot.\n* Allows selection of trained models, inference device, and control settings.\n\n**Object Tracking:**\n* Provides an interface for tracking objects using deep learning models.\n* Allows customization of object detection models, confidence threshold, and robot speed based on object proximity. **Navigation Screen:**\n\nAllows users to set a goal location for the robot using forward and left values. A popup with input fields and \"Cancel\" and \"Start\" buttons appears. \"Start\" triggers navigation, and the robot moves to the destination.\n\n**Projects Screen:**\n\nDisplays a list of OpenBot Playground projects if signed in. Users can execute projects or scan their QR codes. If not signed in, only QR code scanning is available. 
Pulling down the screen reloads projects.\n\n**Google Drive Projects:**\n\nTo execute, tap on the project, then confirm by clicking \"Start\" in the popup.\n\n**QR Code Scanner:**\n\nGrant camera access, scan the project code, and confirm by clicking \"Start\" in the popup.\n\n**Executing Project:**\n\nShows the executed code blocks with a stop button to terminate execution.\n\n**Delete Project:**\n\nLong-press on a project, confirm the deletion in the popup.\n\n**Profile Screen:**\n\nDepending on sign-in status:\n\n* **Signed Out:** \"G\" button to sign in.\n* **Signed In:** \"About\" and sign-out options. This iOS application provides a user interface for implementing OpenBot technology. When the user is not signed in, they will see a \"Sign In with Google\" button. Upon signing in, they gain access to profile options, including \"Edit Profile\" and \"Logout.\" The \"Edit Profile\" button allows users to update their profile information, while the \"Logout\" button logs them out of the account.\n\nAdditionally, the app integrates with OpenBot Playground services. To access these services, users click on the Playground icon. The \"Code Structure\" section indicates that the app utilizes the TensorFlow Lite Object Detection iOS Demo as a foundation and includes TFLite models and camera feed capabilities. 
Users can also optionally train their own Driving Policy using Firebase Authentication.", + "https://github.com/ob-f/OpenBot/blob/master/open-code/src/components/blockly/README.md": "OpenBot PlayGround is a visual programming environment for OpenBot robots, providing customizable blocks for controlling flow, loops, operators, variables, lights, controller, sound, and sensors.\n\n**Control:** Allows for controlling program flow with blocks like Start, Conditionals, and Wait.\n\n**Loops:** Facilitates repetitive execution with blocks like Repeat and While.\n\n**Operators:** Enables performing calculations and creating expressions with Arithmetic and Math Operators.\n\n**Variables:** Stores and manipulates data within the program using blocks for declaring, setting, and changing variables.\n\n**Lights:** Controls indicators and brightness of the robot's LEDs dynamically.\n\n**Controller:** Allows selecting controller modes (Gamepad or Phone) and drive modes (Joystick, Game, or dual).\n\n**Sound:** Plays sound based on drive modes and robot speed.\n\n**Sensors:** Provides blocks that return readings for various sensor inputs, such as voltage, sonar, bumpers, and speed. 
**Summary:**\n\nOpenBot Playground provides various blocks for controlling robotics:\n\n**Phone Sensors:**\n- Measures Gyroscope, Acceleration, and Magnetic readings.\n\n**Car Sensors:**\n- Provides Sonar, Speed, and bumper collision detection.\n\n**Movement:**\n- Controls robot movement at different speeds and directions.\n- Includes blocks for setting speed and moving in multiple directions.\n\n**Artificial Intelligence (AI):**\n- Tracks objects, enables autopilot, and provides point goal navigation.\n- Includes blocks for Object Tracking, Autopilot, and Point Goal Navigation.\n\n**Advanced Artificial Intelligence (AI):**\n- Features modular blocks for detection and autopilot.\n- Allows multiple object detection and executes tasks based on detection results.", + "https://github.com/ob-f/OpenBot/blob/master/open-code/src/services/README.md": "**Firebase Google Sign-In Authentication**\n\nFirebase Google Sign-In Authentication allows users to log in to web apps using their Google credentials. This feature simplifies the login process and enhances security.\n\n**Setting Up Firebase Project**\n\n1. Create a new Firebase project from the Firebase Console.\n2. Register the web app with Firebase and add the Firebase SDK to the project's `.env` file.\n3. Store sensitive information securely in environment variables within the `.env` file.\n4. Enable Google as a sign-in method in Firebase Authentication. 
**Firestore Database:**\n\n* Enable Firestore database in Google Cloud Console, selecting production mode and choosing a location.\n* Configure security rules to allow read and write access for authenticated users.\n\n**Google Drive Services:**\n\n* Enable Google Drive API in Google Cloud Console.\n* Navigate to the \"Library\" section and search for Google Drive API.\n* Enable the API for the project.\n\n**Troubleshooting:**\n\n* **Invalid Credentials:**\n * Check client ID and API key in Firebase Console.\n * Review environment variables for typos.\n * Ensure correct settings when calling firebase.auth().signInWithPopup().\n * Verify SDK version and Google account validity.\n* **User Account Disabled:**\n * Re-activate or create a new account.\n * Check account status before authentication to display error messages if disabled.\n* **CORS Error:**\n * Verify expected web application behavior and check CORS settings. To resolve issues with data not loading or displaying correctly, consider the following steps based on your setup:\n\n**Web App with Client-Side Authentication:**\n- Ensure your web application domain is added and CORS is enabled in the Firebase Console Authentication settings.\n\n**Web App with Server-Side Authentication:**\n- Add necessary CORS headers to your server response to allow requests from your web application domain.\n\n**Firebase Hosting:**\n- CORS is automatically enabled when hosting your web application on Firebase Hosting.\n\n**Firebase Cloud Run:**\n- Use Firebase Cloud Run to serve API requests with CORS headers included.", + "https://github.com/ob-f/OpenBot/blob/master/open-code/README.md": "OpenBot Playground is a user-friendly platform where individuals can create instructions for OpenBot robots using drag-and-drop functionality.\n\nKey Features:\n\n* Sync project data with Google Drive for accessibility across devices.\n* Store projects locally for offline use.\n* Scan QR codes from the OpenBot mobile app to execute programs.\n* 
Retrieve saved files from Google Drive and load them effortlessly in the mobile app.\n* Design instructions for OpenBot without the need for coding.\n* Offers a responsive design optimized for mobile and tablet devices.\n\nStorage Options:\n\n* Local Storage: Projects are automatically saved to the user's device's browser cache, allowing for offline access.\n* Google Drive Storage: Projects can be uploaded to the user's Google Drive account, providing online availability and access from any device with internet connectivity.\n\nOpenBot Playground Screens:\n\n* Home Page: Features a carousel explaining the platform, a header with theme and sign-in options, and a project section listing saved projects.\n* Playground Page: Provides coding blocks for users to create robot instructions. Includes a header with project name, help center, and theme options, a workspace for dragging and dropping blocks, and a bottom bar with code generation, upload, zoom, model addition, and undo/redo functionalities.\n* Header: The website header includes the OpenBot-Playground logo, theme change icon for switching between light and dark modes, and a \"Sign-in\" button for Google account authentication. **User Login, Profile Options, and AutoSync:**\n\nUsers log in with their Google account, granting permission to modify Google Drive. Upon login, they can edit their profile and synchronize their machine learning models from the OpenBot app to the Playground's AI blocks via AutoSync. AutoSync updates a config.json file on Google Drive to list available models, which are then displayed in the Playground.\n\n**Playground Features:**\n\nThe Playground page has a header with project management options (rename/delete) and a help button with instructions.\n\n**Carousal and Project Section:**\n\nThree-step carousels on the homepage explain the application. 
The 'My Projects' section lists local and Google Drive-stored projects, with options to create new ones.\n\n**Workspace:**\n\nDrag-and-drop coding blocks allow users to create code in JavaScript or Python. Blocks can be selected, deleted, restored, and organized in the workspace, enabling error-free code generation. The Playground Bottom Bar provides functionalities essential for the openBot-PlayGround experience.\n\n\"Generate Code\" generates a QR code linking to a JavaScript/Python file uploaded to Google Drive for the project, providing convenient sharing and access on mobile devices. It also uploads an XML file representing the block configuration, allowing for project backup and collaboration.\n\n\"Code Editor\" opens a side window displaying the block code in JavaScript or Python, enabling users to examine the accuracy of their blocks.\n\n\"Add Model\" allows users to externally add AI models to the robot application and customize their configuration.\n\n\"WorkSpace Controller\" includes undo/redo buttons and zoom controls for ease of use.", + "https://github.com/ob-f/OpenBot/blob/master/policy/frontend/README.md": "OpenBot Policy Frontend is a project based on Create React App. It features several available scripts:\n\n* `yarn start`: Runs the app in development mode, accessible at http://localhost:3000. Live reloading and lint error monitoring are enabled.\n* `yarn test`: Launches the interactive test runner.\n* `yarn build`: Builds the app for production, optimizing performance and minifying the build. The output is stored in the `build` folder.", + "https://github.com/ob-f/OpenBot/blob/master/policy/README.md": "**Driving Policy for OpenBot**\n\n**Disclaimer:**\n\n* Safety: Use cautiously, as policies may crash the robot. Operate in a safe environment and ensure you have a connected game controller for emergency stops.\n* Compute Hardware: Training requires substantial resources and may impact your machine's performance. 
High-end hardware with ample RAM and GPU is recommended.\n* Patience: Acquiring a suitable driving policy requires data collection, hyperparameter tuning, and patience. Beginners in machine learning models may find this challenging.\n\n**Environment Setup:**\n\n* Create a conda environment using the provided environment files.\n* For GPU support, ensure appropriate drivers are installed.\n* Alternatively, manually create a conda environment, activate it, and install TensorFlow and additional packages as detailed for different operating systems.\n* Install essential packages from `requirements.txt` and additionally `requirements_web.txt` for web app use.\n\n**Training:**\n\n* A high-end laptop or workstation with ample RAM and dedicated GPU is recommended for efficient training.\n* Use TensorFlow for training, with appropriate GPU support if available. **Dataset Collection**\n\nTo train an autonomous driving policy, data must be collected using a Bluetooth game controller connected to a smartphone. The phone should be running the CIL-Mobile-Cmd model and data can be recorded by pressing the X button on a PS4 controller. Data will be stored in zip files within the OpenBot folder on the phone, organized by timestamp.\n\nFor optimal training, the dataset folder should be structured with subfolders for train_data and test_data. Each recording session should have its own folder within the train_data subfolder. Recordings can be extracted from the phone and copied manually or uploaded automatically to a Python server using the Web interface.\n\n**Data Conversion** (Optional)\n\nFor better training performance, the dataset can be converted into a specialized tfrecord format using the provided Python commands.\n\n**Policy Training**\n\nTraining can be performed using a Jupyter Notebook or by running a shell command. The Jupyter Notebook provides a guided training process, while the shell command allows for adjusting hyperparameters. 
The notebook can be opened using the provided command.\n\n**Web Interface**\n\nThe Web interface allows for uploading logs automatically and visualizing the trained model. Required Python packages include aiohttp, aiozeroconf, and imageio. **Training**\n\nTo train a model using the provided tfrecord dataset, execute:\n\n```bash\npython -m openbot.train\n```\n\nIf you have not converted your dataset to tfrecord format, add the `--create_tf_record` flag. To train directly from files, add the `--no_tf_record` flag.\n\nTo improve training, consider using a larger batch size, more epochs, and enabling batch normalization. The `pilot_net` model offers superior performance on some tasks while still running in real-time on most smartphones.\n\n**Deployment**\n\nAfter training, the best checkpoint is saved as `autopilot_float.tflite`. Place this file in the `networks` folder in the Android Studio project and recompile the app.\n\n**Web App**\n\nThe provided web app enables remote training and visualization of datasets, sessions, and models. Activate the OpenBot environment and run the command `python -m openbot.server`. 
The app can be accessed at [http://localhost:8000/#/uploaded](http://localhost:8000/#/uploaded) for uploaded data and [http://localhost:8000/#/models](http://localhost:8000/#/models) for trained models.", + "https://github.com/ob-f/OpenBot/blob/master/python/README.md": "The Python module enables Linux-based control of an OpenBot vehicle using a camera and joystick.\n\nThe robot operates in three modes:\n- **Debug:** Uses preloaded data instead of camera images and joystick commands.\n- **Inference:** Processes real-time camera images and joystick inputs using a Neural Network policy.\n- **Joystick:** Controls the robot directly via joystick in either \"Dual\" or \"Joystick\" mode.\n\nTo run the robot, execute `run.py` with arguments specifying the policy path, dataset path (for Debug mode), log path, inference backend, running mode, and control mode (for Joystick mode).\n\nFor training data generation, use `generate_data_for_training.py`. To optimize policy inference speed on Intel hardware, export the trained model to OpenVino using `export_openvino.py`.\n\nTests and example code demonstrate functionalities such as data downloading, OpenVino exporting, data generation, inference, joystick connection, motor connection, and video streaming to a Realsense camera.\n\nInstallation involves the setup of required Python modules for inference, joystick control, sensing, and actuation. The provided README describes the setup and requirements for running OpenBot, a Python application for controlling a robotic arm with a camera and controller. The tested hardware configuration includes an Up Core Plus board, Realsense D435i camera, Xbox One controller, and OpenBot Firmware for Arduino. Python 3.9 and various Python modules are required for operation, including pyserial, pyrealsense2, opencv-python, pygame, and openvino-dev. Installation instructions are provided for these modules and for adding the OpenBot policy module to PYTHONPATH when using TensorFlow for inference. 
The code is designed for Linux-based computers and may require modifications or alternative Python modules for MacOs or Windows. Specific instructions are provided for using pyrealsense2-macosx and building TfLite for MacOS." +} diff --git a/openbot-qna-main/temp_files/README.md b/openbot-qna-main/temp_files/README.md new file mode 100644 index 000000000..28fea56e0 --- /dev/null +++ b/openbot-qna-main/temp_files/README.md @@ -0,0 +1,1864 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + OpenBot/README.md at master · isl-org/OpenBot · GitHub + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + +
+ Skip to content + + + + + + + + + + + + +
+
+ + + + + + + + + + + + + + +
+ +
+ + + + + + + + +
+ + + + + +
+ + + + + + + + + +
+
+
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + + / + + OpenBot + + + Public +
+ + +
+ +
+ + +
+
+ +
+
+ + + + +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+
+ + + + +
+ +
+ +
+
+ +
+ +
+

Footer

+ + + + +
+
+ + + + + © 2024 GitHub, Inc. + +
+ + +
+
+ + + + + + + + + + + + + + + + + + + +
+ +
+
+ + + diff --git a/openbot-qna-main/temp_files/enrolment-guide.pdf b/openbot-qna-main/temp_files/enrolment-guide.pdf new file mode 100644 index 000000000..651000552 Binary files /dev/null and b/openbot-qna-main/temp_files/enrolment-guide.pdf differ