当没有识别到目标时,程序不会进入那个 if 分支,因此分支里的 delay 不会执行;
现在的现象是:只要画面中出现红色,显示就会一卡一卡的。
T
tvfa 发布的帖子
-
LCD和三个舵机能同时使用吗?一边让舵机动一边看LCD
舵机控制例子
这个例子展示了如何使用OpenMV来控制舵机
# Servo + LCD demo: sweep two servos while continuously showing the camera
# feed on the LCD shield.  Answers "can the LCD and the servos run at the
# same time?" -- yes, provided the frame is pushed to the LCD inside the
# same loop that steps the servos.
#
# Fixes vs. the pasted original:
#  * `s3 = Servo(3)` had been swallowed into the `# P7` comment on the same
#    line, so s3 was never defined and the script crashed on first use.
#  * `lcd.init()` was called twice.
#  * The LCD was only refreshed before entering the sweep loops, so the
#    display froze while the servos moved; it is now refreshed every step.
import sensor, image, time
from pyb import Servo
import lcd

sensor.reset()
sensor.set_pixformat(sensor.RGB565)
# NOTE(review): the LCD shield is 128x160; QVGA frames are cropped by
# lcd.display().  Use sensor.QQVGA2 if the whole frame must be visible.
sensor.set_framesize(sensor.QVGA)
sensor.skip_frames(time = 2000)

lcd.init()
s1 = Servo(1)  # P7
s3 = Servo(3)  # P9
clock = time.clock()

while True:
    # Sweep up: pulse width 1000us -> 1999us, updating the LCD each step.
    for i in range(1000):
        lcd.display(sensor.snapshot())
        s1.pulse_width(1000 + i)
        s3.pulse_width(1000 + i)
        time.sleep_ms(10)
    # Sweep back down: 1999us -> 1000us.
    for i in range(1000):
        lcd.display(sensor.snapshot())
        s1.pulse_width(1999 - i)
        s3.pulse_width(1999 - i)
        time.sleep_ms(10)
-
模板匹配的模板大小只能是80*60吗,有没有什么办法增加?
# Template Matching Example - Normalized Cross Correlation (NCC)
#
# This example shows how to use the NCC feature of your OpenMV Cam to match
# image patches to parts of an image.  Except for extremely controlled
# environments NCC is not all that useful.
#
# WARNING: NCC support needs to be reworked!  As of right now this feature
# needs a lot of work to be made into something useful.  This script remains
# to show that the functionality exists, but in its current state it is
# inadequate.
import time, sensor, image
# Import only the two search-mode constants instead of the whole module.
from image import SEARCH_EX, SEARCH_DS

# Reset sensor and apply settings.
sensor.reset()
sensor.set_contrast(1)
sensor.set_gainceiling(16)
# Max resolution for template matching with SEARCH_EX is QQVGA (160x120).
sensor.set_framesize(sensor.QQVGA)
# You can set windowing to reduce the search image.
#sensor.set_windowing(((640-80)//2, (480-60)//2, 80, 60))
sensor.set_pixformat(sensor.GRAYSCALE)

# Load template.  The template should be a small (e.g. 32x32 pixels)
# grayscale image, and must be smaller than the ROI searched below.
template = image.Image("1.pgm")

clock = time.clock()

# Run template matching.
while True:
    clock.tick()
    img = sensor.snapshot()
    # find_template(template, threshold, [roi, step, search])
    #   threshold: 0.70 similarity cutoff.
    #   step:      loop step (y+=step, x+=step); bigger is faster.
    #   search:    SEARCH_EX (exhaustive) or SEARCH_DS (diamond search).
    # Note1: the ROI must be smaller than the image and bigger than the
    #        template.
    # Note2: in diamond search, step and ROI are both ignored.
    # Fix: the original roi=(0, 0, 307, 233) was LARGER than the QQVGA
    # frame buffer (160x120), violating Note1; clamp it to the full frame.
    r = img.find_template(template, 0.70, step=4, search=SEARCH_EX,
                          roi=(0, 0, 160, 120))
    # Mark the matched region, if any.
    if r:
        img.draw_rectangle(r)
    print(clock.fps())
-
openmv脱机运行控制舵机有时会重启?
# OpenMV sorting script: reads a one-byte command over UART ('1'=49,
# '2'=50, '4'=52), detects colored blobs / QR codes, and drives two servos
# to sort objects, reporting results back over UART.
#
# Reconstructed from a single-line forum paste: the original indentation is
# lost, so the block structure below (especially placing the QR-code
# section inside `if c == 2:`) is a best-effort reading -- verify against
# the running copy.
#
# Fixes vs. the paste:
#  * sensor.set_windowing() was called AFTER sensor.snapshot(), so blob
#    detection ran on the previous, un-windowed frame; windowing is now set
#    before the snapshot it should affect.
#  * A dead `led = LED(1)` store at the top of the loop (immediately
#    overwritten by `led = LED(4)`) was removed.
import sensor, image, time, lcd, math, pyb
import json
from pyb import UART
from pyb import Servo
from pyb import Pin
from pyb import LED

led = LED(4)
a = 0      # last UART command byte
q = 0      # last QR-code payload seen
c = 0      # pick counter; most actions disabled once c == 2
H = 0
cir_r = 0
rec_w = 0
rec_h = 0
led.toggle()

# LAB color thresholds: (L_min, L_max, A_min, A_max, B_min, B_max).
blue_threshold = (18, 38, 9, 58, -90, -33)
red_threshold = (29, 53, 34, 74, 15, 55)  # (13, 38, 30, 58, 16, 45)
yellow_threshold = [(54, 67, -13, 4, 49, 71), (72, 95, -17, 9, 54, 82)]
# NOTE(review): the first tuple has A_min=75 > A_max=61 -- that range is
# inverted and likely matches nothing; confirm the intended values.
fangkuai_thresholds = [(13, 38, 75, 61, 17, 52), (19, 57, 19, 87, 16, 58)]
thresholds = [(13, 38, 6, 61, 17, 52)]

sensor.reset()
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QVGA)
sensor.skip_frames(20)
sensor.set_auto_whitebal(False)  # keep colors stable for LAB thresholding

s2 = Servo(2)
s1 = Servo(1)
uart = UART(3, 115200)
clock = time.clock()
#lcd.init()
s1.angle(-55)  # home position

while True:
    clock.tick()
    # readchar() returns -1 when no byte is waiting; none of the command
    # codes compared below match -1, so polling here is safe.
    a = uart.readchar()
    led = LED(4)
    led.off()

    if a == 49 and c != 2:  # command '1'
        led = LED(2)
        led.off()
        # NOTE(review): the paste swaps the servo objects in this branch
        # (s1 <- Servo(2), s2 <- Servo(1)); kept as-is, but confirm it is
        # intentional.
        s1 = Servo(2)
        s2 = Servo(1)
        s2.angle(-85)
        s1.angle(30)
        pyb.delay(200)

    if a == 52 and c != 2:  # command '4': look for a red blob and grab it
        s2 = Servo(2)
        s1 = Servo(1)
        sensor.set_framesize(sensor.QVGA)
        sensor.set_windowing((156, 1, 117, 240))  # window BEFORE snapshot
        img = sensor.snapshot()
        led = LED(1)
        led.off()
        led = LED(4)
        led.off()
        blobs0 = img.find_blobs([blue_threshold], pixels_threshold=100)
        blobs1 = img.find_blobs([red_threshold], pixels_threshold=40)
        if blobs1:
            led.off()
            s1.angle(-15)
            s2.angle(60)
            pyb.delay(200)
            s1.angle(70)
            pyb.delay(200)
            s1.angle(-15)
            pyb.delay(200)
            c = c + 1

    if a == 50 and c != 2:  # command '2': classify blue/yellow/red
        sensor.set_framesize(sensor.QVGA)
        sensor.set_windowing((165, 20, 148, 100))  # window BEFORE snapshot
        s2 = Servo(2)
        s1 = Servo(1)
        led = LED(4)
        led.on()
        img = sensor.snapshot()
        blobs0 = img.find_blobs([blue_threshold], pixels_threshold=100)
        blobs1 = img.find_blobs([red_threshold], pixels_threshold=40)
        blobs2 = img.find_blobs(yellow_threshold, pixels_threshold=40)
        if blobs0:
            s1.angle(-15)
        elif blobs2:
            s1.angle(-15)
            s2.angle(-5)
            pyb.delay(200)
            s1.angle(45)
            pyb.delay(200)
            s1.angle(-15)
            pyb.delay(200)
        elif blobs1:
            s1.angle(-15)
            s2.angle(60)
            pyb.delay(200)
            s1.angle(45)
            pyb.delay(200)
            s1.angle(-15)
            pyb.delay(200)

    if c == 2:  # done picking: park, then scan for QR codes / shapes
        s1.angle(-75)
        led = LED(2)
        led.on()
        sensor.set_framesize(sensor.QVGA)
        sensor.set_windowing((0, 0, 307, 233))  # window BEFORE snapshot
        img = sensor.snapshot()
        #img.lens_corr(1.8)
        for code in img.find_qrcodes():
            q = code.payload()
            led = LED(4)
            led.off()
        blobs3 = img.find_blobs(fangkuai_thresholds, pixels_threshold=20)
        # NOTE(review): q keeps its last value across loop iterations, so
        # '1' keeps being re-sent every frame after an 'R' code was seen
        # once -- confirm this is intended.
        if q == 'R':
            uart.write('1')
            s2.angle(30)
            print(1)
        if blobs3:
            for blob in img.find_blobs(fangkuai_thresholds, pixels_threshold=50):
                if blob.density() > 0.68:
                    # High fill ratio -> solid square.
                    uart.write('1')
                    s2.angle(30)
                    led = LED(4)
                    led.on()
                    print(1)
                if 0.61 > blob.density() > 0.20:
                    # Mid fill ratio -> circle/ring; annotate and report.
                    img.draw_keypoints([(blob.cx(), blob.cy(), int(math.degrees(blob.rotation())))], size=20)
                    img.draw_circle((blob.cx(), blob.cy(), int((blob.w() + blob.h()) / 4)))
                    uart.write('2')
                    s2.angle(30)
                    led = LED(4)
                    led.on()
                    print(2)