再请教一下,如果想通过mqtt把mjpg数据发出去一般需要怎么写,我看socket http 直接socket send
不过
cframe=img.compressed(quality=35)
client.publish("openmv/test", cframe)
#print(gc.mem_free())
time.sleep(1000)
报错,image 没有 len
再请教一下,如果想通过mqtt把mjpg数据发出去一般需要怎么写,我看socket http 直接socket send
不过
cframe=img.compressed(quality=35)
client.publish("openmv/test", cframe)
#print(gc.mem_free())
time.sleep(1000)
报错,image 没有 len
256KB .DATA/.BSS/Heap/Stack
512KB Frame Buffer/Stack
256 KB DMA Buffers
(1MB Total)
把数组定义在512kb内可以么,还是其实资源基本已经用完了,红外只有160*120
应该是堆和栈的问题,就是如何定义一个大数组在堆里,160*120 总应该可以的吧
# Lepton Get Object Temp Example
#
# Setup section: configure a FLIR Lepton sensor (GRAYSCALE / QQVGA) in
# radiometric measurement mode, join a Wi-Fi network via the WINC module,
# and open an MQTT connection. The main loop further down converts the
# mapped grayscale values back into temperatures.
import sensor, image, time, math,network,usocket,sys,json,gc
from mqtt import MQTTClient
SSID='' # Network SSID (left blank in this copy of the code)
KEY='' # Network key (left blank in this copy of the code)
# Grayscale threshold band (min, max): pixels in this band count as "hot"
# after the temperature-to-grayscale mapping configured below.
threshold_list = [(200, 255)]
# Target temperature range in Celsius. With measurement mode enabled,
# grayscale 0 maps to min_temp_in_celsius and 255 to max_temp_in_celsius.
min_temp_in_celsius = 20.0
max_temp_in_celsius = 40.0
print("Resetting Lepton...")
# These settings are applied on reset
sensor.reset()
sensor.ioctl(sensor.IOCTL_LEPTON_SET_MEASUREMENT_MODE, True)
sensor.ioctl(sensor.IOCTL_LEPTON_SET_MEASUREMENT_RANGE, min_temp_in_celsius, max_temp_in_celsius)
print("Lepton Res (%dx%d)" % (sensor.ioctl(sensor.IOCTL_LEPTON_GET_WIDTH),
sensor.ioctl(sensor.IOCTL_LEPTON_GET_HEIGHT)))
print("Radiometry Available: " + ("Yes" if sensor.ioctl(sensor.IOCTL_LEPTON_GET_RADIOMETRY) else "No"))
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.QQVGA) # 160x120
sensor.skip_frames(100)
# Init wlan module and connect to network
print("Trying to connect... (may take a while)...")
wlan = network.WINC()
wlan.connect(SSID, key=KEY, security=wlan.WPA_PSK)
# We should have a valid IP now via DHCP
print(wlan.ifconfig())
client = MQTTClient("abut", "192.168.101.100", port=1883,user="admin",password="ddd")
client.connect()
clock = time.clock()
# Only blobs with more pixels than "pixels_threshold" and more area than "area_threshold" are
# returned by "find_blobs" below. Change "pixels_threshold" and "area_threshold" if you change the
# camera resolution. "merge=True" merges all overlapping blobs in the image.
def map_g_to_temp(g, min_temp=None, max_temp=None):
    """Map an 8-bit grayscale value (0-255) linearly to degrees Celsius.

    The Lepton is configured (IOCTL_LEPTON_SET_MEASUREMENT_RANGE) so that
    pixel value 0 corresponds to the low end of the measurement range and
    255 to the high end; this inverts that mapping.

    :param g: grayscale value in 0-255.
    :param min_temp: low end of the range in Celsius; defaults to the
        module-level ``min_temp_in_celsius``.
    :param max_temp: high end of the range in Celsius; defaults to the
        module-level ``max_temp_in_celsius``.
    :return: temperature in Celsius as a float.
    """
    if min_temp is None:
        min_temp = min_temp_in_celsius
    if max_temp is None:
        max_temp = max_temp_in_celsius
    return ((g * (max_temp - min_temp)) / 255.0) + min_temp
# Main loop: grab a frame, find and annotate hot blobs, dump a per-pixel
# temperature list, then publish the frame over MQTT as JPEG.
while (True):
    clock.tick()  # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot()

    # Collect (x, y, max-temperature) per hot blob while the image is
    # still grayscale (stats are read before to_rainbow() below).
    blob_stats = []
    blobs = img.find_blobs(threshold_list, pixels_threshold=200, area_threshold=200, merge=True)
    for blob in blobs:
        blob_stats.append((blob.x(), blob.y(),
                           map_g_to_temp(img.get_statistics(thresholds=threshold_list,
                                                            roi=blob.rect()).max())))

    img.to_rainbow(color_palette=sensor.PALETTE_IRONBOW)  # false-color it

    # Draw stuff on the colored image.
    for blob in blobs:
        img.draw_rectangle(blob.rect())
        # img.draw_cross(blob.cx(), blob.cy())
    for blob_stat in blob_stats:
        img.draw_string(blob_stat[0], blob_stat[1] - 10, "%.2f C" % blob_stat[2], mono_space=False)

    # Per-pixel temperature readout.
    # NOTE(review): one get_statistics() call per 1x1 ROI is extremely
    # slow — img.get_pixel(x, y) should be equivalent and much faster;
    # confirm on the firmware in use. Also note y covers only 0..79 of
    # the 120-row QQVGA frame, and the stats are taken AFTER to_rainbow()
    # changed the pixel format — confirm both are intentional.
    buffer = []
    for x in range(0, 160):
        for y in range(0, 80):
            ROI = (x, y, 1, 1)
            statistics = img.get_statistics(roi=ROI)
            buffer.append(map_g_to_temp(statistics.max()))
    print(buffer)

    print("FPS %f - Lepton Temp: %f C" % (clock.fps(), sensor.ioctl(sensor.IOCTL_LEPTON_GET_FPA_TEMPERATURE)))

    # FIX: the original did json.dumps(img) and client.publish(..., img.array()),
    # both of which fail — an image object is not JSON-serializable and does
    # not support len(), which MQTTClient.publish() needs to size the packet
    # (the "image has no len" error reported in this thread). Publish the
    # JPEG-compressed frame as a bytes-like payload instead.
    cframe = img.compressed(quality=35)
    client.publish("openmv/test", cframe.bytearray())
    print(gc.mem_free())
    time.sleep(1000)  # NOTE(review): sleep units differ across OpenMV
                      # firmware versions (ms on older, seconds on newer) —
                      # confirm 1000 is the intended delay.
漏了点
@kidswong999 在 请问一下如果想把摄像头的原始数据通过wifi发送出去怎么操作,而不是仅仅发送一张jpg图片? 中说:
得提供全部的代码我才能测试。。。
# Full program as posted for testing — setup section.
# FIX: the first line read "mport sensor, ..." (missing the "i"), which is
# a SyntaxError; restored to "import".
import sensor, image, time, math, network, usocket, sys, json, gc
from mqtt import MQTTClient

SSID = 'abut'      # Network SSID
KEY = 'tuba1abut'  # Network key

# Grayscale threshold band: pixels in this band count as "hot" after the
# temperature-to-grayscale mapping configured below.
threshold_list = [(200, 255)]
# Target measurement range in Celsius (mapped onto grayscale 0..255).
min_temp_in_celsius = 20.0
max_temp_in_celsius = 40.0

print("Resetting Lepton...")
# These settings are applied on reset.
sensor.reset()
sensor.ioctl(sensor.IOCTL_LEPTON_SET_MEASUREMENT_MODE, True)
sensor.ioctl(sensor.IOCTL_LEPTON_SET_MEASUREMENT_RANGE, min_temp_in_celsius, max_temp_in_celsius)
print("Lepton Res (%dx%d)" % (sensor.ioctl(sensor.IOCTL_LEPTON_GET_WIDTH),
                              sensor.ioctl(sensor.IOCTL_LEPTON_GET_HEIGHT)))
print("Radiometry Available: " + ("Yes" if sensor.ioctl(sensor.IOCTL_LEPTON_GET_RADIOMETRY) else "No"))

sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.QQVGA)  # 160x120
sensor.skip_frames(100)

# Join the Wi-Fi network via the WINC module; DHCP assigns the IP.
print("Trying to connect... (may take a while)...")
wlan = network.WINC()
wlan.connect(SSID, key=KEY, security=wlan.WPA_PSK)
print(wlan.ifconfig())

client = MQTTClient("abut", "192.168.101.100", port=1883, user="admin", password="ddd")
client.connect()

clock = time.clock()
def map_g_to_temp(g, min_temp=None, max_temp=None):
    """Map an 8-bit grayscale value (0-255) linearly to degrees Celsius.

    Inverse of the grayscale mapping set up by
    IOCTL_LEPTON_SET_MEASUREMENT_RANGE: pixel value 0 is the low end of
    the measurement range, 255 the high end.

    :param g: grayscale value in 0-255.
    :param min_temp: low end of the range in Celsius; defaults to the
        module-level ``min_temp_in_celsius``.
    :param max_temp: high end of the range in Celsius; defaults to the
        module-level ``max_temp_in_celsius``.
    :return: temperature in Celsius as a float.
    """
    if min_temp is None:
        min_temp = min_temp_in_celsius
    if max_temp is None:
        max_temp = max_temp_in_celsius
    return ((g * (max_temp - min_temp)) / 255.0) + min_temp
# Main loop of the posted full program: grab a frame, find and annotate
# hot blobs, then print a per-pixel temperature list (this copy does not
# publish over MQTT). Indentation restored — the forum paste stripped it.
while (True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot()
    blob_stats = []
    blobs = img.find_blobs(threshold_list, pixels_threshold=200, area_threshold=200, merge=True)
    # Collect stats into a list of tuples: (x, y, max temperature of blob),
    # read while the image is still grayscale.
    for blob in blobs:
        blob_stats.append((blob.x(), blob.y(), map_g_to_temp(img.get_statistics(thresholds=threshold_list,
        roi=blob.rect()).max())))
    img.to_rainbow(color_palette=sensor.PALETTE_IRONBOW) # color it
    # Draw stuff on the colored image
    for blob in blobs:
        img.draw_rectangle(blob.rect())
        #img.draw_cross(blob.cx(), blob.cy())
    for blob_stat in blob_stats:
        img.draw_string(blob_stat[0], blob_stat[1] - 10, "%.2f C" % blob_stat[2], mono_space=False)
    # Per-pixel readout via 1x1-ROI get_statistics() calls.
    # NOTE(review): this is very slow (160*80 = 12800 calls per frame) and
    # y covers only 0..79 of the 120-row QQVGA frame; the stats are also
    # taken AFTER to_rainbow() changed the pixel format — confirm .max()
    # still maps back to temperature correctly here.
    buffer=[]
    for x in range(0,160):
        for y in range(0,80):
            ROI=(x,y,1,1)
            statistics=img.get_statistics(roi=ROI)
            buffer.append( map_g_to_temp(statistics.max()))
    print(buffer)
我感觉应该是内存定义的问题。。。。stm32h7 应该有1m的内存,目前我只是处理160*120的红外图像,不知道有经验的人可以分享一下么
# Build a full 160x120 per-pixel temperature map and publish it over MQTT.
#
# FIX: the original appended boxed Python floats to a list, which exhausts
# the MicroPython heap (the "MemoryError: memory allocation failed,
# allocating 16824 bytes" reported in this thread), and then passed the
# list to publish(), which needs a bytes-like payload with len(). An
# array('f') stores packed 32-bit floats (160*120*4 = 76800 bytes) and
# converts to bytes for publishing.
# NOTE(review): 76800 bytes is still a large heap allocation for this
# board — if it fails, publish the raw grayscale frame instead and do the
# temperature mapping on the receiving side.
from array import array

buffer = array('f')
for x in range(0, 160):
    for y in range(0, 120):
        # NOTE(review): one get_statistics() call per 1x1 ROI is very
        # slow; img.get_pixel(x, y) should return the same grayscale
        # value far faster — confirm against the firmware in use.
        ROI = (x, y, 1, 1)
        statistics = img.get_statistics(roi=ROI)
        buffer.append(map_g_to_temp(statistics.max()))
print(buffer)
client.publish("openmv/test", bytes(buffer))
但是会爆出MemoryError: memory allocation failed, allocating 16824 bytes 错误,是不是内存溢出了,那么如果想把160*120原始数据通过mqtt传出去用什么办法呢
谢谢,用的是 OpenMV4 + Lepton 3.5