ESP8266 Code
/*
 * Smart voice control: works with Tmall Genie, Xiaomi XiaoAI, Baidu XiaoDu and Google Assistant at the same time
 * 2021-08-12
 * QQ group: 566565915
 * Website: https://bemfa.com
 */
#include <ESP8266WiFi.h>
#include <WiFiManager.h> // https://github.com/tzapu/WiFiManager
#define server_ip "bemfa.com" // Bemfa cloud server address, keep the default
#define server_port "8344"    // server port; 8344 is the TCP device cloud port
//******************** settings you need to change *******************//
#define wifi_name "yuxuan"       // Wi-Fi SSID, case sensitive
#define wifi_password "yuxuan@+" // Wi-Fi password
String UID = "5e8480057c38c6294784d50620d34247"; // private key (UID), copy it from the Bemfa console and replace with your own
String TOPIC = "led002";                         // topic name, created in the console
const int LED_Pin = D2; // LED pin; D2 is the NodeMcu naming scheme, change it on other ESP8266 boards
//**************************************************//
// maximum packet size in bytes
#define MAX_PACKETSIZE 512
// heartbeat interval: 60 s
#define KEEPALIVEATIME 60*1000
// TCP client state, keep the defaults
WiFiClient TCPclient;
String TcpClient_Buff = ""; // buffer for data pushed by the server
unsigned int TcpClient_BuffIndex = 0;
unsigned long TcpClient_preTick = 0;
unsigned long preHeartTick = 0;    // heartbeat timer
unsigned long preTCPStartTick = 0; // connection timer
bool preTCPConnected = false;
// forward declarations
// Wi-Fi connection
void doWiFiTick();
void startSTA();
// TCP connection
void doTCPClientTick();
void startTCPClient();
void sendtoTCPServer(String p);
// LED control functions, defined below
void turnOnLed();
void turnOffLed();
/*
 * Send data to the TCP server
 */
void sendtoTCPServer(String p){
  if (!TCPclient.connected())
  {
    Serial.println("Client is not ready");
    return;
  }
  TCPclient.print(p);
  preHeartTick = millis(); // restart the heartbeat timer; something must be sent at least every 60 s
}
/*
 * Establish the connection to the server and subscribe to the topic
 */
void startTCPClient(){
  if(TCPclient.connect(server_ip, atoi(server_port))){
    Serial.print("\nConnected to server:");
    Serial.printf("%s:%d\r\n", server_ip, atoi(server_port));
    String tcpTemp = "";
    tcpTemp = "cmd=1&uid=" + UID + "&topic=" + TOPIC + "\r\n"; // build the subscribe command
    sendtoTCPServer(tcpTemp);                                  // send the subscribe command
    tcpTemp = "";                                              // clear the buffer
    /*
    // To subscribe to several topics at once, send cmd=1&uid=xxxxxxxxxxxxxxxxxxxxxxx&topic=xxx1,xxx2,xxx3,xxx4\r\n
    // Tutorial: https://bbs.bemfa.com/64
    */
    preTCPConnected = true;
    TCPclient.setNoDelay(true);
  }
  else{
    Serial.print("Failed to connect to server:");
    Serial.println(server_ip);
    TCPclient.stop();
    preTCPConnected = false;
  }
  preTCPStartTick = millis();
}
/*
 * Poll the connection: read pushed data and keep the heartbeat alive
 */
void doTCPClientTick(){
  // if Wi-Fi is down there is nothing to do; if TCP dropped, reconnect
  if(WiFi.status() != WL_CONNECTED) return;
  if (!TCPclient.connected()) { // reconnect after a disconnect
    if(preTCPConnected == true){
      preTCPConnected = false;
      preTCPStartTick = millis();
      Serial.println();
      Serial.println("TCP Client disconnected.");
      TCPclient.stop();
    }
    else if(millis() - preTCPStartTick > 1*1000) // retry after 1 s
      startTCPClient();
  }
  else
  {
    if (TCPclient.available()) { // read incoming data
      char c = TCPclient.read();
      TcpClient_Buff += c;
      TcpClient_BuffIndex++;
      TcpClient_preTick = millis();
      if(TcpClient_BuffIndex >= MAX_PACKETSIZE - 1){
        TcpClient_BuffIndex = MAX_PACKETSIZE - 2;
        TcpClient_preTick = TcpClient_preTick - 200;
      }
    }
    if(millis() - preHeartTick >= KEEPALIVEATIME){ // keep-alive
      preHeartTick = millis();
      Serial.println("--Keep alive:");
      sendtoTCPServer("ping\r\n"); // heartbeat; every command must end with \r\n, see the protocol docs
    }
  }
  if((TcpClient_Buff.length() >= 1) && (millis() - TcpClient_preTick >= 200))
  {
    TCPclient.flush();
    Serial.print("Rev string: ");
    TcpClient_Buff.trim();          // strip leading/trailing whitespace
    Serial.println(TcpClient_Buff); // print the received message
    String getTopic = "";
    String getMsg = "";
    if(TcpClient_Buff.length() > 15){ // TcpClient_Buff is the String buffer initialized at the top
      // a pushed command looks roughly like: cmd=2&uid=xxx&topic=light002&msg=off
      int topicIndex = TcpClient_Buff.indexOf("&topic=") + 7; // find "&topic=" and skip past it (7 characters)
      int msgIndex = TcpClient_Buff.indexOf("&msg=");         // find "&msg="
      getTopic = TcpClient_Buff.substring(topicIndex, msgIndex); // extract the topic
      getMsg = TcpClient_Buff.substring(msgIndex + 5);           // extract the message
      Serial.print("topic:------");
      Serial.println(getTopic); // print the extracted topic
      Serial.print("msg:--------");
      Serial.println(getMsg);   // print the extracted message
    }
    if(getMsg == "on"){        // message == turn on
      turnOnLed();
    }else if(getMsg == "off"){ // message == turn off
      turnOffLed();
    }
    TcpClient_Buff = "";
    TcpClient_BuffIndex = 0;
  }
}
/*
 * Start the Wi-Fi connection in station mode
 */
void startSTA(){
  WiFi.disconnect();
  WiFi.mode(WIFI_STA);
  WiFi.begin(wifi_name, wifi_password);
}
/**************************************************************************
                                   WIFI
***************************************************************************/
/*
  WiFiTick
  Checks whether Wi-Fi still needs to be initialized.
  Checks whether Wi-Fi is connected; once it is, starts the TCP client.
  Controls the status LED.
*/
void doWiFiTick(){
  static bool startSTAFlag = false;
  static bool taskStarted = false;
  static uint32_t lastWiFiCheckTick = 0;
  if (!startSTAFlag) {
    startSTAFlag = true;
    startSTA();
  }
  // not connected yet: check again every second
  if ( WiFi.status() != WL_CONNECTED ) {
    if (millis() - lastWiFiCheckTick > 1000) {
      lastWiFiCheckTick = millis();
    }
  }
  // connected: start the TCP client once
  else {
    if (taskStarted == false) {
      taskStarted = true;
      Serial.print("\r\nGet IP Address: ");
      Serial.println(WiFi.localIP());
      startTCPClient();
    }
  }
}
// turn the light on
void turnOnLed(){
  Serial.println("Turn ON...");
  digitalWrite(2, LOW);
  digitalWrite(0, LOW);
  Serial.println("Turn ON OVER");
}
// turn the light off
void turnOffLed(){
  Serial.println("Turn OFF...");
  digitalWrite(0, HIGH);
  digitalWrite(2, HIGH);
  Serial.println("Turn OFF over");
}
// runs once at startup (the Arduino equivalent of main)
void setup() {
  Serial.begin(115200);
  WiFi.mode(WIFI_STA);
  WiFiManager wm;
  bool res;
  res = wm.autoConnect("yuxuanLED", "password"); // SSID and password of the WiFiManager config portal
  if (!res) {
    Serial.println("Failed to connect");
    ESP.restart();
  } else {
    Serial.println("connected...yeey :");
    //Blinker.begin(auth, wm.getWiFiSSID().c_str(), wm.getWiFiPass().c_str());
    pinMode(0, OUTPUT);
    pinMode(2, OUTPUT);
    digitalWrite(0, LOW);
    digitalWrite(2, LOW);
    startTCPClient();
    Serial.println("Beginning...");
  }
}
// main loop
void loop() {
  //doWiFiTick();
  doTCPClientTick();
}
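Before moving on to the phone, it helps to confirm that the sketch's subscribe/heartbeat/push cycle works. The short Python script below is a minimal monitoring sketch, not part of the original project: it speaks the same plain-TCP protocol as the code above (cmd=1 subscribes, ping\r\n is the heartbeat, pushed lines arrive as cmd=2&uid=...&topic=...&msg=...) and reuses the same UID and topic. Run it on a PC, then push "on"/"off" from the Bemfa console to see exactly what the ESP8266 would receive.
import socket
import time

UID = "5e8480057c38c6294784d50620d34247"  # same private key as in the sketch above
TOPIC = "led002"                          # same topic as in the sketch above

s = socket.create_connection(("bemfa.com", 8344))
s.sendall(("cmd=1&uid=" + UID + "&topic=" + TOPIC + "\r\n").encode())  # subscribe, like startTCPClient()
s.settimeout(1.0)
last_ping = time.time()
while True:
    if time.time() - last_ping > 50:  # keep-alive, same idea as KEEPALIVEATIME
        s.sendall(b"ping\r\n")
        last_ping = time.time()
    try:
        data = s.recv(512)
    except socket.timeout:
        continue
    if not data:
        break
    # pushed commands look like: cmd=2&uid=xxx&topic=led002&msg=on
    print(data.decode(errors="replace").strip())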
Phone Gesture-Recognition Code for Switching the Light
import cv2
import math
import socket
import android
import threading
# import tensorflow as tf
import sys
import time
from time import sleep
import numpy as np
from blazeface import *
from cvs import *
import aidlite_gpu
aidlite = aidlite_gpu.aidlite()
droid = android.Android()
droid.ttsSpeak('欢迎使用雨轩手势控制系统')  # TTS greeting: "Welcome to the Yuxuan gesture control system"
def preprocess_image_for_tflite32(image, model_image_size=300):
    print(type(image))
    print(image.shape)
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    image = cv2.resize(image, (model_image_size, model_image_size))
    image = np.expand_dims(image, axis=0)
    image = (2.0 / 255.0) * image - 1.0
    image = image.astype('float32')
    return image
def plot_detections(img, detections, with_keypoints=True):
    output_img = img
    print(img.shape)
    x_min = [0, 0]
    x_max = [0, 0]
    y_min = [0, 0]
    y_max = [0, 0]
    hand_nums = len(detections)
    # if hand_nums > 2:
    #     hand_nums = 2
    print("Found %d hands" % hand_nums)
    if hand_nums > 2:
        hand_nums = 2
    for i in range(hand_nums):
        ymin = detections[i][0] * img.shape[0]
        xmin = detections[i][1] * img.shape[1]
        ymax = detections[i][2] * img.shape[0]
        xmax = detections[i][3] * img.shape[1]
        w = int(xmax - xmin)
        h = int(ymax - ymin)
        h = max(h, w)
        h = h * 224. / 128.
        # ymin -= 0.08 * h
        # xmin -= 0.25 * w
        # xmax = xmin + 1.5 * w
        # ymax = ymin + 1.0 * h
        x = (xmin + xmax) / 2.
        y = (ymin + ymax) / 2.
        xmin = x - h / 2.
        xmax = x + h / 2.
        ymin = y - h / 2. - 0.18 * h
        ymax = y + h / 2. - 0.18 * h
        # store the square crop and draw it on the preview frame
        x_min[i] = int(xmin)
        x_max[i] = int(xmax)
        y_min[i] = int(ymin)
        y_max[i] = int(ymax)
        cv2.rectangle(output_img, (int(xmin), int(ymin)), (int(xmax), int(ymax)), (0, 255, 255), 2)
    return x_min, y_min, x_max, y_max

def calc_palm_moment(image, landmarks):
    # centre of the palm: moment of the wrist and finger-base landmarks
    image_width, image_height = image.shape[1], image.shape[0]
    pts = []
    for index, landmark in enumerate(landmarks):
        if index in (0, 1, 5, 9, 13, 17):
            landmark_x = min(int(landmark[0] * image_width), image_width - 1)
            landmark_y = min(int(landmark[1] * image_height), image_height - 1)
            pts.append((landmark_x, landmark_y))
    M = cv2.moments(np.array(pts, dtype=np.float32))
    cx, cy = 0, 0
    if M['m00'] != 0:
        cx = int(M['m10'] / M['m00'])
        cy = int(M['m01'] / M['m00'])
    return cx, cy

def draw_landmarks(image, cx, cy, landmarks):
    # convert the normalized 21 hand landmarks to pixel coordinates
    image_width, image_height = image.shape[1], image.shape[0]
    landmark_point = []
    for landmark in landmarks:
        landmark_x = min(int(landmark[0] * image_width), image_width - 1)
        landmark_y = min(int(landmark[1] * image_height), image_height - 1)
        landmark_point.append((landmark_x, landmark_y))
    if len(landmark_point) > 0:
        # thumb
        cv2.line(image, landmark_point[2], landmark_point[3], (0, 255, 0), 2)
        cv2.line(image, landmark_point[3], landmark_point[4], (0, 255, 0), 2)
        # index finger
        cv2.line(image, landmark_point[5], landmark_point[6], (0, 255, 0), 2)
        cv2.line(image, landmark_point[6], landmark_point[7], (0, 255, 0), 2)
        cv2.line(image, landmark_point[7], landmark_point[8], (0, 255, 0), 2)
        # middle finger
        cv2.line(image, landmark_point[9], landmark_point[10], (0, 255, 0), 2)
        cv2.line(image, landmark_point[10], landmark_point[11], (0, 255, 0), 2)
        cv2.line(image, landmark_point[11], landmark_point[12], (0, 255, 0), 2)
        # ring finger
        cv2.line(image, landmark_point[13], landmark_point[14], (0, 255, 0), 2)
        cv2.line(image, landmark_point[14], landmark_point[15], (0, 255, 0), 2)
        cv2.line(image, landmark_point[15], landmark_point[16], (0, 255, 0), 2)
        # little finger
        cv2.line(image, landmark_point[17], landmark_point[18], (0, 255, 0), 2)
        cv2.line(image, landmark_point[18], landmark_point[19], (0, 255, 0), 2)
        cv2.line(image, landmark_point[19], landmark_point[20], (0, 255, 0), 2)
        # palm
        cv2.line(image, landmark_point[0], landmark_point[1], (0, 255, 0), 2)
        cv2.line(image, landmark_point[1], landmark_point[2], (0, 255, 0), 2)
        cv2.line(image, landmark_point[2], landmark_point[5], (0, 255, 0), 2)
        cv2.line(image, landmark_point[5], landmark_point[9], (0, 255, 0), 2)
        cv2.line(image, landmark_point[9], landmark_point[13], (0, 255, 0), 2)
        cv2.line(image, landmark_point[13], landmark_point[17], (0, 255, 0), 2)
        cv2.line(image, landmark_point[17], landmark_point[0], (0, 255, 0), 2)
        # palm centre marker
        # handedness.classification[0].index
        # handedness.classification[0].score
        cv2.circle(image, (cx, cy), 12, (0, 255, 0), 2)
        # cv2.putText(image, handedness.classification[0].label[0],
        #             (cx - 6, cy + 6), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0),
        #             2, cv2.LINE_AA)  # label[0]: first character only
    return image
def sendCmd(cmd):
    # open a TCP socket to the Bemfa cloud
    tcp_client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # server address and port
    server_ip = 'bemfa.com'
    server_port = 8344
    try:
        # connect to the server
        tcp_client_socket.connect((server_ip, server_port))
        # publish the message to the topic (cmd=2 is a publish)
        substr = 'cmd=2&uid=5e8480057c38c6294784d50620d34247&topic=led002&msg=' + cmd + '\r\n'
        tcp_client_socket.send(substr.encode("utf-8"))
    except:
        print("Failed to connect")
input_shape=[128,128]
inShape =[1 * 128 * 128 *3*4,]
outShape= [1 * 896*18*4,1*896*1*4]
model_path="models/palm_detection.tflite"
print('gpu:',aidlite.FAST_ANNModel(model_path,inShape,outShape,4,0))
model_path="models/hand_landmark.tflite"
aidlite.set_g_index(1)
inShape1 =[1 * 224 * 224 *3*4,]
outShape1= [1 * 63*4,1*4,1*4]
print('cpu:',aidlite.FAST_ANNModel(model_path,inShape1,outShape1,4,0))
anchors = np.load('models/anchors.npy').astype(np.float32)
camid=0
cap=cvs.VideoCapture(camid)
bHand=False
x_min=[0,0]
x_max=[0,0]
y_min=[0,0]
y_max=[0,0]
fface=0.0
use_brect=True
num=0
switchState=''
while True:
    frame = cvs.read()
    if frame is None:
        continue
    if camid == 1:
        # frame = cv2.resize(frame, (240, 480))
        frame = cv2.flip(frame, 1)
    start_time = time.time()
    img = preprocess_image_for_tflite32(frame, 128)
    # interpreter.set_tensor(input_details[0]['index'], img[np.newaxis,:,:,:])
    if bHand == False:
        # palm detection model finds the hand boxes
        aidlite.set_g_index(0)
        aidlite.setTensor_Fp32(img, input_shape[1], input_shape[1])
        aidlite.invoke()
        # t = (time.time() - start_time)
        # # print('elapsed_ms invoke:', t*1000)
        # lbs = 'Fps: ' + str(int(1/t)) + " ~~ Time:" + str(t*1000) + "ms"
        # cvs.setLbs(lbs)
        raw_boxes = aidlite.getTensor_Fp32(0)
        classificators = aidlite.getTensor_Fp32(1)
        detections = blazeface(raw_boxes, classificators, anchors)
        x_min, y_min, x_max, y_max = plot_detections(frame, detections[0])
        if len(detections[0]) > 0:
            bHand = True
    if bHand:
        hand_nums = len(detections[0])
        if hand_nums > 2:
            hand_nums = 2
        for i in range(hand_nums):
            print(x_min, y_min, x_max, y_max)
            xmin = max(0, x_min[i])
            ymin = max(0, y_min[i])
            xmax = min(frame.shape[1], x_max[i])
            ymax = min(frame.shape[0], y_max[i])
            roi_ori = frame[ymin:ymax, xmin:xmax]
            # cvs.imshow(roi)
            roi = preprocess_image_for_tflite32(roi_ori, 224)
            # hand landmark model runs on the cropped hand
            aidlite.set_g_index(1)
            aidlite.setTensor_Fp32(roi, 224, 224)
            # start_time = time.time()
            aidlite.invoke()
            mesh = aidlite.getTensor_Fp32(0)
            # ffacetmp = tflite.getTensor_Fp32(1)[0]
            # print('fface:', abs(fface - ffacetmp))
            # if abs(fface - ffacetmp) > 0.5:
            bHand = False
            # fface = ffacetmp
            # print('mesh:', mesh.shape)
            mesh = mesh.reshape(21, 3) / 224
            cx, cy = calc_palm_moment(roi_ori, mesh)
            draw_landmarks(roi_ori, cx, cy, mesh)
            # brect = calc_bounding_rect(roi_ori, mesh)
            # draw_bounding_rect(use_brect, roi_ori, brect)
            # draw_mesh(roi_ori, mesh)
            frame[ymin:ymax, xmin:xmax] = roi_ori
    t = (time.time() - start_time)
    # print('elapsed_ms invoke:', t*1000)
    lbs = 'Fps: ' + str(int(100/t)/100.) + " ~~ Time:" + str(t*1000) + "ms"
    cvs.setLbs(lbs)
    cvs.imshow(frame)
    sleep(1)
import apkneed
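The listing above detects the hand and draws the 21 landmarks; the last step is turning a gesture into a switch command by calling sendCmd('on') or sendCmd('off'). The snippet below is a minimal sketch of one way to do that, not the author's original gesture logic: count_raised_fingers, handle_gesture, the open-hand/fist thresholds and the voice feedback strings are all illustrative assumptions layered on the sendCmd and droid objects defined above.
# Illustrative sketch only -- not the original gesture-recognition logic.
# "mesh" is the (21, 3) normalized landmark array from the main loop; for an
# upright hand, a smaller y value means the point is higher in the image.
def count_raised_fingers(mesh):
    # hypothetical helper: a finger counts as raised when its tip
    # (8, 12, 16, 20) sits above its middle joint (6, 10, 14, 18)
    tips = [8, 12, 16, 20]
    joints = [6, 10, 14, 18]
    return sum(1 for tip, joint in zip(tips, joints) if mesh[tip][1] < mesh[joint][1])

def handle_gesture(mesh, switchState):
    # hypothetical mapping: open hand -> "on", fist -> "off";
    # only publish when the state actually changes
    fingers = count_raised_fingers(mesh)
    if fingers >= 4 and switchState != 'on':
        sendCmd('on')              # publishes cmd=2&...&msg=on to topic led002
        droid.ttsSpeak('灯已打开')  # optional voice feedback: "light is on"
        return 'on'
    if fingers == 0 and switchState != 'off':
        sendCmd('off')
        droid.ttsSpeak('灯已关闭')  # "light is off"
        return 'off'
    return switchState
With these helpers, the main loop would call switchState = handle_gesture(mesh, switchState) right after draw_landmarks(roi_ori, cx, cy, mesh).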
Configuring Bemfa Cloud (巴法云) for Tmall Genie
Open the Tmall Genie app, go to Content -> Genie Skills at the bottom, and search the skill center for 巴法云. Find the Bemfa skill and tap the name "巴法云" itself to open it (tap the three characters, not the "Try" button), then bind the account by logging in with the Bemfa account registered in the previous step. If devices have already been created in the Bemfa console, they will now show up under "My Home" in Tmall Genie.