====== Gießwagen - Plant Detection ======

By Dylan Elian Huete Arbizu (35120)

==== Introduction ====
  * Wi-Fi Network: For ESP32 to connect and transmit data.
==== Pin Assignments ====

^ Function
  * Connect a client to the ESP32's TCP server to receive the sensor data.
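
Before running the full client further below, a minimal connectivity check can confirm that the ESP32's TCP server is reachable. This is only a sketch: the IP address is a placeholder, and the port (5055) is the one used by the client later on this page.

<code python>
import socket

ESP32_IP = "192.168.4.1"   # placeholder -- replace with the ESP32's actual IP address
ESP32_PORT = 5055          # same port as used by the client below

# Open a TCP connection and read one raw chunk to confirm the server is streaming.
try:
    with socket.create_connection((ESP32_IP, ESP32_PORT), timeout=3) as s:
        chunk = s.recv(136)   # one frame is 136 bytes (8-byte timestamp + 64 x 2-byte readings)
        print(f"Connected, received {len(chunk)} bytes")
except OSError as e:
    print(f"Could not reach the ESP32: {e}")
</code>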
==== Circuit diagram ====

{{ :

==== Client Software for Data Reception and Visualization ====

A fully functional Python client application logs incoming data and visualizes it as a heatmap in real time:

  * Raw Data Reception: Receives packets of 136 bytes each (64 x 2-byte sensor readings + 8 bytes timestamp) from the ESP32 over a TCP socket.
  * Data Logging: Writes each received frame with microsecond-precision timestamps to a CSV file for later analysis.
  * Live Visualization: Upscales each 8x8 frame to 64x64 and displays it as a heatmap in a Tkinter window, refreshed roughly every 100 ms.
  * Threading/Responsiveness: Data reception runs in a background thread so the GUI stays responsive while frames arrive.
  * Safe Shutdown: Ensures sockets and files are properly closed when the application exits.

<code python>
import socket
import struct
import csv
import tkinter as tk
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import matplotlib.pyplot as plt
import numpy as np
from scipy.ndimage import zoom
import threading

ESP32_IP = "192.168.4.1"          # placeholder -- set to the ESP32's actual IP address
ESP32_PORT = 5055
CSV_FILENAME = "sensor_data.csv"  # output log (file name truncated in the source)
FRAME_SIZE = 8 + 64 * 2           # 8 bytes timestamp + 128 bytes frame data
UPSCALE_FACTOR = 8                # For smooth 8x8 -> 64x64 heatmap

latest_matrix = None
latest_timestamp = None
lock = threading.Lock()
running = True                    # cleared by clean_exit() to stop the receiver thread

def re_range(pMatrix):
    # Rescale the frame so its maximum maps to 5000 (keeps the colormap range stable)
    pMatrix = np.array(pMatrix)
    nMax = pMatrix.max()
    rat = nMax / 5000 if nMax != 0 else 1
    r_matrix = pMatrix / rat
    return r_matrix.astype(int)

def data_receiver(sock, writer, csvfile):
    global latest_matrix, latest_timestamp
    while running:
        data = b''
        while len(data) < FRAME_SIZE:
            try:
                packet = sock.recv(FRAME_SIZE - len(data))
            except socket.timeout:
                continue
            except OSError:
                return            # socket closed during shutdown
            if not packet:
                print("Connection closed by the ESP32")
                return
            data += packet
        timestamp_us = struct.unpack('<Q', data[:8])[0]
        distances = struct.unpack('<64H', data[8:])
        matrix = np.array(distances, dtype=np.uint16).reshape((8, 8))
        writer.writerow([timestamp_us] + list(matrix.flatten()))
        csvfile.flush()
        with lock:
            latest_matrix = matrix
            latest_timestamp = timestamp_us

def update_gui():
    with lock:
        matrix = None if latest_matrix is None else latest_matrix.copy()
        timestamp = latest_timestamp
    if matrix is not None:
        matrix = re_range(matrix)
        high_res_matrix = zoom(matrix, UPSCALE_FACTOR)
        im.set_array(high_res_matrix)
        ax.set_title(f"Timestamp: {timestamp} us")
        canvas.draw()
    root.after(100, update_gui)

def clean_exit():
    global running
    running = False
    try:
        sock.close()
    except Exception:
        pass
    try:
        csvfile.close()
    except Exception:
        pass
    root.destroy()

# Socket connection and main setup
sock = socket.create_connection((ESP32_IP, ESP32_PORT))
sock.settimeout(1.0)
csvfile = open(CSV_FILENAME, "w", newline="")
writer = csv.writer(csvfile)
header = ["timestamp_us"] + [f"d{i}" for i in range(64)]  # column names reconstructed (truncated in the source)
writer.writerow(header)

receiver_thread = threading.Thread(target=data_receiver, args=(sock, writer, csvfile), daemon=True)
receiver_thread.start()

root = tk.Tk()
root.title("ToF Sensor Heatmap")

fig, ax = plt.subplots()
im = ax.imshow(np.zeros((8 * UPSCALE_FACTOR, 8 * UPSCALE_FACTOR)), vmin=0, vmax=5000)
canvas = FigureCanvasTkAgg(fig, master=root)
canvas.get_tk_widget().pack()

close_button = tk.Button(root, text="Close", command=clean_exit)
close_button.pack(pady=10)

root.after(100, update_gui)
root.protocol("WM_DELETE_WINDOW", clean_exit)
root.mainloop()
</code>
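
Once some frames have been logged, a quick read-back confirms the CSV structure before the full analysis below (a sketch assuming the placeholder file and column names used above):

<code python>
import pandas as pd

df = pd.read_csv("sensor_data.csv")           # file name as configured in the client (placeholder)
print(df.shape)                               # (n_frames, 65): timestamp column + 64 distance columns
print(df.iloc[0, 1:65].values.reshape(8, 8))  # first logged frame viewed as an 8x8 matrix
</code>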
+ | |||
+ | **Graphic result** | ||
+ | {{ : | ||
===== Results ===== | ===== Results ===== | ||
  * The system is resilient to network disconnects.
==== Pictures of the prototype ====

==== Data analysis ====

**Data Cleaning:**
A segment of interest is selected, discarding outliers and unreliable data. Due to some hardware limitations, the mark acquisition was not fully reliable: 12 mark readings were expected, but only 8 were usable, and some readings were duplicated. The usable marks were identified manually, based on the expected timing and pattern.
+ | |||
+ | **Path Segmentation: | ||
+ | getPath(df) constructs segments (" | ||
+ | |||
+ | **Speed Calculation: | ||
+ | mean_speed(data) estimates the average speed between time marks by measuring intervals between zeros. | ||
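
A toy check of this averaging logic, with made-up interval values (the mark spacings of 200 and 1000 are the ones used in mean_speed below; units follow the track layout):

<code python>
# Hypothetical time intervals between consecutive zero (mark) readings
t1, t2, t3 = 2.0, 10.0, 12.5

# Known spacings between the marks: 200 for the first pair, 1000 for the next two
spd1, spd2, spd3 = 200 / t1, 1000 / t2, 1000 / t3
print((spd1 + spd2 + spd3) / 3)   # -> 93.33..., the mean of 100, 100 and 80
</code>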
+ | |||
+ | * After interpolating sensor data along the location axis (temporal/ | ||
+ | |||
+ | * This spatial interpolation significantly enhances intra-frame resolution. | ||
+ | |||
+ | * The aggregation step then merges these larger frames horizontally with value averaging over overlapping columns, preserving continuity. | ||
+ | |||
+ | * The plot displays a much higher-resolution heatmap representing the sensor data over the scanned path. | ||
+ | |||
<code python>
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from scipy.ndimage import zoom
from scipy.interpolate import interp1d

fname1 = "sensor_data.csv"       # log produced by the client (file name truncated in the source)
TIME_COL = "timestamp_us"        # timestamp column written by the client
MARK_COL = "mark"                # column whose zero entries mark reference positions (name truncated in the source)

df = pd.read_csv(fname1)
# Since the marks acquisition is not reliable enough, the data has to be treated manually to discard unuseful data
SEGMENT_START, SEGMENT_END = 1419, None   # end row of the usable segment truncated in the source
dataFrame = df.loc[SEGMENT_START:SEGMENT_END]

def getPath(df):
    # Indices of the mark rows; every four consecutive marks delimit one pass
    idx = list(df.loc[df[MARK_COL] == 0].index)
    path_list = []
    for i in range(len(idx)):
        if (i+1) % 4 == 0 and i != 0:
            path_list.append(df.loc[idx[i-3]:idx[i]])
    return path_list

def mean_speed(data):
    # Average speed from the time intervals between consecutive marks
    idx = list(data.loc[data[MARK_COL] == 0].index)
    t1 = data.loc[idx[1], TIME_COL] - data.loc[idx[0], TIME_COL]
    t2 = data.loc[idx[2], TIME_COL] - data.loc[idx[1], TIME_COL]
    t3 = data.loc[idx[3], TIME_COL] - data.loc[idx[2], TIME_COL]
    spd1 = 200/t1      # 200 between the first pair of marks
    spd2 = 1000/t2     # 1000 between the following pairs
    spd3 = 1000/t3
    return (spd1+spd2+spd3)/3

paths = getPath(dataFrame)

# 1. Calculate continuous locations and concatenate all paths as before:
for i in range(len(paths)):
    loc = (paths[i][TIME_COL] - paths[i][TIME_COL].iloc[0]) * mean_speed(paths[i])
    paths[i] = pd.concat([paths[i], loc.rename("location")], axis=1)
    paths[i] = paths[i].drop(list(paths[i].loc[paths[i][MARK_COL] == 0].index))

path_t = pd.concat(paths)
path_t = path_t.sort_values("location")

locations = path_t['location'].values
data_values = path_t.iloc[:, 1:65].values   # the 64 sensor columns (after the timestamp column)

# 2. Interpolate sensor columns independently on a uniform location grid:
min_loc, max_loc = np.min(locations), np.max(locations)
num_interp_points = int(np.ceil(max_loc - min_loc)) + 1
interp_locations = np.linspace(min_loc, max_loc, num_interp_points)

interp_data = np.zeros((num_interp_points, data_values.shape[1]))
for col in range(data_values.shape[1]):
    interp_func = interp1d(locations, data_values[:, col], kind='linear', fill_value="extrapolate")
    interp_data[:, col] = interp_func(interp_locations)

# 3. Reshape each row into 8x8 frames:
num_frames = interp_data.shape[0]
frame_height, frame_width = 8, 8
frames_8x8 = [interp_data[i].reshape(frame_height, frame_width) for i in range(num_frames)]

# 4. Interpolate each 8x8 frame to 64x64 using scipy.ndimage.zoom:
zoom_factor = 64 / 8  # 8x to 64x scaling

frames_64x64 = [zoom(frame, zoom_factor) for frame in frames_8x8]

# 5. Aggregate frames horizontally with averaging over overlaps (same as before):
max_offset = num_frames - 1
final_width = max_offset + 64  # width after scaling frames to 64 wide
final_frame_64 = np.zeros((64, final_width))
count_64 = np.zeros((64, final_width))

for i, frame in enumerate(frames_64x64):
    offset = i
    final_frame_64[:, offset:offset + 64] += frame
    count_64[:, offset:offset + 64] += 1

aggregated_64 = np.divide(final_frame_64, count_64, out=np.zeros_like(final_frame_64), where=count_64 > 0)

# 6. Plot the aggregated 64x wide frame:
plt.figure(figsize=(final_width / 16, 8))  # Adjust size for clarity
plt.imshow(aggregated_64, aspect='auto')
plt.colorbar(label='Distance')
plt.title("Aggregated sensor heatmap along the scanned path")
plt.xlabel('Location along path')
plt.ylabel('Sensor row')
plt.show()
</code>
{{ :

===== Discussion =====

  * This system showcases the potential of integrating low-cost sensor networks and automation for sustainable environmental stewardship: