Example Projects
4 Complete Runnable Projects
Project 1: Simple Pick and Place
Simple pick and place project demo
Project Goal
Pick an object from a fixed position and place it at a target location.
Required Materials
- Complete Delta Robot kit
- Test object (small plastic block, ≤50g)
Complete Code
from serial import Serial
import time

# Connect robot
# NOTE(review): 'COM3' is a Windows port name — on Linux/macOS this would be
# e.g. '/dev/ttyUSB0'; adjust for your system.
robot = Serial('COM3', 115200, timeout=1)
time.sleep(2)  # give the controller time to reset after the port opens
def send_gcode(cmd):
    """Transmit one G-code line over the serial link.

    Appends the newline terminator, echoes the command to the console,
    and pauses briefly so the firmware has time to consume it.
    """
    line = cmd + '\n'
    robot.write(line.encode())
    print(f"Sent: {cmd}")
    time.sleep(0.1)
def pick_place(object_pos=(30, -20), target_pos=(-40, 30), safe_z=-160, pick_z=-200):
    """Pick an object at *object_pos* and place it at *target_pos*.

    Args:
        object_pos: (x, y) pick location in robot coordinates (mm).
            Defaults to the original demo position.
        target_pos: (x, y) place location in robot coordinates (mm).
        safe_z: Z height used for horizontal travel (mm) — clears the work area.
        pick_z: Z height at which the suction cup contacts the object (mm).

    The fixed ``time.sleep`` calls are open-loop waits sized for the default
    travel distances; longer moves may need longer delays.
    """
    object_x, object_y = object_pos
    target_x, target_y = target_pos

    print("Starting pick and place...")

    # 1. Home so every run starts from a known machine position
    send_gcode("G28")
    time.sleep(3)

    # 2. Move above the object at the safe travel height
    send_gcode(f"G0 X{object_x} Y{object_y} Z{safe_z}")
    time.sleep(2)

    # 3. Descend to pick height
    send_gcode(f"G0 Z{pick_z}")
    time.sleep(1)

    # 4. Suction: M3 = pump on, M7 = valve on
    send_gcode("M3")
    send_gcode("M7")
    time.sleep(0.5)

    # 5. Rise back to safe height with the object attached
    send_gcode(f"G0 Z{safe_z}")
    time.sleep(1)

    # 6. Travel to the target location
    send_gcode(f"G0 X{target_x} Y{target_y}")
    time.sleep(2)

    # 7. Descend to release height
    send_gcode(f"G0 Z{pick_z}")
    time.sleep(1)

    # 8. Release: M9 = valve off
    send_gcode("M9")
    time.sleep(0.3)

    # 9. Rise, then stop the pump (M5)
    send_gcode(f"G0 Z{safe_z}")
    time.sleep(1)
    send_gcode("M5")

    print("Pick and place completed!")
# Run
pick_place()

# Close serial
robot.close()  # always release the port so the next run can open it
Expected Result
Object moves from start to target position smoothly without stuttering.
Project 2: Vision-based Sorting
Sort objects by color
Project Goal
Use YOLOv8 to identify different colored objects and sort them to corresponding target areas.
Main Program
import cv2
from ultralytics import YOLO
from serial import Serial
import time
import numpy as np

# Initialize
# NOTE(review): no timeout is given here, unlike Project 1's timeout=1 —
# reads would block indefinitely; confirm this is intended.
robot = Serial('COM3', 115200)
model = YOLO('yolov8n.pt')            # detection weights
cap = cv2.VideoCapture(0)             # default camera
H = np.load('homography_matrix.npy')  # pixel -> robot-plane homography

# Sorting target positions
# (x, y) drop-off coordinates in robot space, keyed by detected color.
sort_bins = {
'red': (-60, 40),
'green': (0, 60),
'blue': (60, 40)
}
def pixel_to_robot(px, py):
    """Map a camera pixel (px, py) into robot-plane coordinates using the
    homography matrix H loaded at startup."""
    src = np.array([[[px, py]]], dtype='float32')
    dst = cv2.perspectiveTransform(src, H)
    mapped = dst[0][0]
    return mapped[0], mapped[1]
def sort_object(color, x, y):
    """Pick the object at robot coordinates (x, y) and drop it into the
    bin assigned to *color* in ``sort_bins``."""
    target_x, target_y = sort_bins[color]

    # Each step: (g-code line, seconds to wait after sending).
    sequence = [
        ('G28\n', 2),                          # home first
        (f'G0 X{x:.1f} Y{y:.1f} Z-160\n', 1),  # above the object
        ('G0 Z-200\n', 0),                     # descend to pick height
        ('M3\n', 0),                           # pump on
        ('M7\n', 0.5),                         # valve on -> suction
        ('G0 Z-160\n', 0),                     # lift
        (f'G0 X{target_x} Y{target_y}\n', 1),  # travel to the bin
        ('G0 Z-200\n', 0),                     # descend to drop height
        ('M9\n', 0.3),                         # valve off -> release
        ('G0 Z-160\n', 0),                     # lift clear
        ('M5\n', 0),                           # pump off
    ]
    for line, pause in sequence:
        robot.write(line.encode())
        if pause:
            time.sleep(pause)
# Main loop: grab frames, detect objects, and sort each recognized one.
print("Color sorting started...")
while True:
    ret, frame = cap.read()
    if not ret:
        # Camera disconnected or stream ended
        break
    # Object detection
    results = model(frame)
    for r in results:
        boxes = r.boxes
        for box in boxes:
            # Get class and position
            cls = int(box.cls[0])
            x1, y1, x2, y2 = box.xyxy[0].tolist()
            # Bounding-box center in pixel coordinates
            cx, cy = int((x1 + x2) / 2), int((y1 + y2) / 2)
            # Identify color (by class ID)
            # NOTE(review): assumes custom-trained weights where class ids
            # 0/1/2 mean red/green/blue — the stock 'yolov8n.pt' uses COCO
            # classes, so confirm the weights file used here.
            color_map = {0: 'red', 1: 'green', 2: 'blue'}
            if cls in color_map:
                color = color_map[cls]
                # Coordinate transform: pixel -> robot plane
                robot_x, robot_y = pixel_to_robot(cx, cy)
                # Execute sorting (blocks until the pick/place finishes)
                print(f"Sorting {color} object at ({robot_x:.1f}, {robot_y:.1f})")
                sort_object(color, robot_x, robot_y)
    # Display results
    annotated = results[0].plot()
    cv2.imshow('Color Sorting', annotated)
    # 'q' quits the loop
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
robot.close()
Project 3: Conveyor Belt Tracking
Pick from moving conveyor belt
Project Goal
Pick objects from a moving conveyor belt; this requires motion compensation and a prediction algorithm.
Core Technology: Motion Compensation
def predict_position(current_x, current_y, velocity, delay):
    """
    Predict where a conveyed object will be once the robot can act.

    Args:
        current_x, current_y: Current object position (mm).
        velocity: Conveyor speed (mm/s). A scalar is treated as motion
            along +X only (original behavior); a (vx, vy) pair allows an
            arbitrary belt direction.
        delay: Robot response delay (s).

    Returns:
        (predicted_x, predicted_y): Predicted position after *delay* seconds.
    """
    try:
        vx, vy = velocity          # (vx, vy) pair: belt in any direction
    except TypeError:
        vx, vy = velocity, None    # scalar: X-axis-only belt
    predicted_x = current_x + vx * delay
    # Preserve original behavior exactly for scalar input: Y is untouched.
    predicted_y = current_y if vy is None else current_y + vy * delay
    return predicted_x, predicted_y
Project 4: Custom Gripper Integration
Replace with servo-driven mechanical gripper
Project Goal
Replace pneumatic suction cup with servo-driven mechanical gripper for gripping irregular objects.
Firmware Adaptation
// File: esp32/gripper.cpp
#include
Servo gripper;
#define GRIPPER_PIN 16
void setup() {
gripper.attach(GRIPPER_PIN);
gripper.write(90); // Initial position (open)
}
// M7: Close
void gripperClose() {
gripper.write(30); // Close angle
}
// M9: Open
void gripperOpen() {
gripper.write(90); // Open angle
}
Get Complete Code
The complete code, datasets, and 3D model files for all example projects can be found in the GitHub repository:
→ GitHub Examples Directory