#!/usr/bin/python3

# This example shows how to use the remote Process module to estimate motion in
# the camera image. Frames are handed to a separate process, which runs a simple
# block-matching motion search, and the resulting vectors are drawn on the preview.

import queue
import threading

import cv2
import numpy as np

import picamera2
from picamera2 import MappedArray, Process, RemoteMappedArray

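# Motion search parameters: each BLOCK_SIZE x BLOCK_SIZE block of the previous
# frame is searched for in the new frame, over offsets of up to SEARCH_SIZE
# pixels in each direction, sampled every STEP_SIZE pixels.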
BLOCK_SIZE = 32
SEARCH_SIZE = 16
STEP_SIZE = 8

last_frame = None
motion_map = None


def run(request):
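    # This function runs in the remote process: compare the incoming frame with
    # the previous one and return the estimated motion map.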
    global last_frame

    if last_frame is None:
        last_frame = request.make_array("main")
        return None

    with RemoteMappedArray(request, "main") as m:
        motion_map = calculate_motion(last_frame, m.array)

    last_frame = request.make_array("main")

    return motion_map


def return_thread(futures):
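    # Collect results from the remote process as they arrive, and publish the
    # latest motion map for the drawing callback to use.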
    global motion_map

    while True:
        future = futures.get()
        if future is None:  # sentinel from the main thread: no more results
            break
        motion_map = future.result()


def calculate_motion(frame1, frame2):
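    # Brute-force block matching: for each block of the previous frame, search a
    # window of the new frame for the offset that minimises the sum of squared
    # differences (SSD).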
    motion_map = np.zeros((frame1.shape[0] // BLOCK_SIZE, frame1.shape[1] // BLOCK_SIZE, 2), dtype=np.int8)
    for block_x in range(0, frame1.shape[1], BLOCK_SIZE):
        for block_y in range(0, frame1.shape[0], BLOCK_SIZE):
            # Use a signed type so that the subtraction below cannot wrap around.
            block = frame1[block_y:block_y + BLOCK_SIZE, block_x:block_x + BLOCK_SIZE].astype(np.int32)
            min_diff = np.inf
            max_diff = 0
            min_offset = (0, 0)
            for offset_x in range(-SEARCH_SIZE, SEARCH_SIZE + 1, STEP_SIZE):
                if block_x + offset_x < 0 or block_x + offset_x + BLOCK_SIZE > frame2.shape[1]:
                    continue
                for offset_y in range(-SEARCH_SIZE, SEARCH_SIZE + 1, STEP_SIZE):
                    if block_y + offset_y < 0 or block_y + offset_y + BLOCK_SIZE > frame2.shape[0]:
                        continue
                    block2 = frame2[block_y + offset_y:block_y + offset_y + BLOCK_SIZE,
                                    block_x + offset_x:block_x + offset_x + BLOCK_SIZE].astype(np.int32)
                    diff = np.sum((block - block2)**2)
                    if diff < min_diff:
                        min_diff = diff
                        min_offset = (offset_x, offset_y)
                    if diff > max_diff:
                        max_diff = diff

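            # If the best and worst matches are of similar quality, the block is
            # probably featureless, so report no motion for it.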
            if max_diff < 4 * min_diff:
                motion_map[block_y // BLOCK_SIZE, block_x // BLOCK_SIZE] = (0, 0)
            else:
                motion_map[block_y // BLOCK_SIZE, block_x // BLOCK_SIZE] = min_offset

    return motion_map


def draw_motion_map(request):
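    # Post-callback, run for every displayed frame: overlay the most recent
    # motion vectors as arrows on the preview image.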
    if motion_map is None:
        return

    with MappedArray(request, "main") as m:
        for block_x in range(0, motion_map.shape[1]):
            for block_y in range(0, motion_map.shape[0]):
                mid_x = block_x * BLOCK_SIZE + BLOCK_SIZE // 2
                mid_y = block_y * BLOCK_SIZE + BLOCK_SIZE // 2
                # Convert to plain ints so the int8 offsets cannot overflow.
                offset_x, offset_y = (int(v) for v in motion_map[block_y, block_x])
                cv2.arrowedLine(m.array, (mid_x, mid_y), (mid_x + offset_x, mid_y + offset_y), (0, 0, 255), 2)


if __name__ == "__main__":
    picam2 = picamera2.Picamera2()
    config = picam2.create_preview_configuration()
    config["buffer_count"] = 2
    picam2.configure(config)
    picam2.post_callback = draw_motion_map
    picam2.start_preview(picamera2.Preview.QTGL)
    picam2.start()

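    # Start the remote process that will run our motion estimation.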
    process = Process(run, picam2)

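    # Results come back as futures; a separate thread waits on them so that the
    # capture loop is never blocked.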
    futures = queue.Queue()
    thread = threading.Thread(target=return_thread, args=(futures,))
    thread.start()

    for _ in range(1000):
        with picam2.captured_request() as request:
            future = process.send(request)
            futures.put(future)

    futures.put(None)  # sentinel so that return_thread terminates
    thread.join()