Notes on debugging Kinect on VS2019
Reference: https://blog.csdn.net/baolinq/article/details/52373574
Download OpenCV and run the self-extracting exe to unpack it locally.
(Adding a library under Linker -> Additional Dependencies is equivalent to writing #pragma comment(lib, "xxx.lib") in the code.)
I later learned that a dll is a dynamic-link library; before this I did not really understand how compilation and execution actually work. You can either add OpenCV's bin folder to the PATH environment variable or put the required dll files in the Debug directory (the same directory as the executable). Roughly, the process is: the .h file declares the functions; at link time those calls are resolved against the .lib import library (compilation and linking end here); only at run time do the .lib stubs lead to the .dll, which holds the actual definitions and implementations. The default dll search order is roughly the exe's own directory, the system directories (System32 etc.), the current directory, and then the directories on PATH; if no matching dll is found, the program fails to start with a missing-dll error.
Reference links for this part:
https://www.cnblogs.com/azbane/p/7364060.html
https://www.cnblogs.com/405845829qq/p/4108450.html
You can also see my own summary of this topic: https://blog.csdn.net/qq_15036691/article/details/110189014
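To make the link-time versus run-time distinction concrete, here is a minimal sketch (user32.lib / MessageBoxA are just stock Win32 examples, not part of this project) contrasting implicit linking through #pragma comment with loading a dll explicitly at run time:
#include <windows.h>
#include <iostream>
// Implicit linking: the header declares MessageBoxA, user32.lib supplies the
// import stub at link time, and user32.dll is located when the exe starts.
#pragma comment(lib, "user32.lib")
int main() {
    MessageBoxA(nullptr, "implicitly linked", "demo", MB_OK);
    // Explicit linking: no .lib involved; the dll is searched for and loaded
    // only when LoadLibrary runs, and the function address is fetched by name.
    HMODULE h = LoadLibraryA("user32.dll");
    if (h) {
        using MsgBoxFn = int(WINAPI*)(HWND, LPCSTR, LPCSTR, UINT);
        auto fn = reinterpret_cast<MsgBoxFn>(GetProcAddress(h, "MessageBoxA"));
        if (fn) fn(nullptr, "explicitly loaded", "demo", MB_OK);
        FreeLibrary(h);
    }
    else {
        std::cout << "user32.dll was not found in the search path" << std::endl;
    }
    return 0;
}
If the dll cannot be found anywhere in the search order above, the implicit case fails before main even runs, while the explicit case only fails at the LoadLibrary call.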
Notes:
1. SysWow64 ("System, Windows on Windows 64"): on 64-bit Windows this folder holds the 32-bit system files.
2. System32:
On a 64-bit system it holds the 64-bit system files; when a 32-bit process running on 64-bit Windows loads a dll from System32, the OS transparently redirects the access to the corresponding file in SysWow64.
On a 32-bit system it simply holds the 32-bit system files.
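To observe this redirection from code, a process can ask whether it is running under WOW64; a minimal sketch using the standard IsWow64Process API (not specific to this project):
#include <windows.h>
#include <iostream>
int main() {
    BOOL isWow64 = FALSE;
    // TRUE means this is a 32-bit process on 64-bit Windows, so its view of
    // System32 is silently redirected to SysWow64.
    if (IsWow64Process(GetCurrentProcess(), &isWow64))
        std::cout << (isWow64 ? "running under WOW64" : "no WOW64 redirection") << std::endl;
    return 0;
}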
After a lot of debugging, reading code found online, and going through the Kinect Sample sources, the basic operations are summarized as follows.
Loosely speaking, the basic flow in Kinect is: the sensor mySensor exposes sources such as myColorSource and myBodySource; each source is opened with OpenReader to obtain a reader (myColorReader, myBodyReader); a frame object such as myBodyFrame then captures whatever the reader sees at that instant, and its contents are stored in the myBodyArr array for post-processing.
IKinectSensor* mySensor = nullptr;
GetDefaultKinectSensor(&mySensor);
mySensor->Open();
IColorFrameSource* myColorSource = nullptr;
mySensor->get_ColorFrameSource(&myColorSource);
IColorFrameReader* myColorReader = nullptr;
myColorSource->OpenReader(&myColorReader);
int colorHeight = 0, colorWidth = 0;
IFrameDescription* myDescription = nullptr;
myColorSource->get_FrameDescription(&myDescription);
myDescription->get_Height(&colorHeight);
myDescription->get_Width(&colorWidth);
IColorFrame* myColorFrame = nullptr;
Mat original(colorHeight, colorWidth, CV_8UC4); // allocate the BGRA output matrix
myBodyCount:
IBodyFrameSource* myBodySource = nullptr;
mySensor->get_BodyFrameSource(&myBodySource);
IBodyFrameReader* myBodyReader = nullptr;
myBodySource->OpenReader(&myBodyReader);
int myBodyCount = 0; // number of bodies the sensor can track
myBodySource->get_BodyCount(&myBodyCount);
ICoordinateMapper* myMapper = nullptr;
mySensor->get_CoordinateMapper(&myMapper);
IBodyFrame* myBodyFrame = nullptr;
myBodyReader->AcquireLatestFrame(&myBodyFrame); // the return value must be checked against S_OK
// for example, spin until a frame is actually available:
while (myBodyReader->AcquireLatestFrame(&myBodyFrame) != S_OK);
myBodyArr:
IBody** myBodyArr = new IBody*[myBodyCount];
for (int i = 0; i < myBodyCount; i++)
myBodyArr[i] = nullptr;
myBodyFrame: the frame's skeleton data is copied into the myBodyArr array:
myBodyFrame->GetAndRefreshBodyData(myBodyCount, myBodyArr) == S_OK
myBodyArr: each tracked body's joints are then copied into the joint array myJointArr:
Joint myJointArr[JointType_Count];
myBodyArr[i]->GetJoints(JointType_Count, myJointArr) == S_OK
myJointArr's layout: indexing the array with a JointType enumeration value yields the Joint structure of the corresponding body joint, and its members (e.g. Position) give the data you need.
myJointArr[JointType_HandRight].Position; // example: position of the right hand
ColorSpacePoint t_point;
myMapper->MapCameraPointToColorSpace(myJointArr[JointType_HandRight].Position, &t_point);
t_point.X;
t_point.Y;
Copy the whole opencv2 folder from <OpenCV install dir>\build\include into the project's .\inc folder.
Copy everything under C:\Program Files\Microsoft SDKs\Kinect\v2.0_1409\inc into .\inc.
Copy everything under <pthreads install dir>\Pre-built.2\include into .\inc.
Copy everything under <OpenCV install dir>\build\x64\vc15\lib into the project's .\lib folder.
Copy everything under C:\Program Files\Microsoft SDKs\Kinect\v2.0_1409\Lib\x64 into .\lib.
Copy everything under <pthreads install dir>\Pre-built.2\lib into .\lib.
In the Property Manager, right-click Debug|x64 and choose "Add New Project Property Sheet", saving it in .\test (the source-code folder). This step only exists so the property sheet can be reused later; it can be skipped.
Then double-click the newly added property sheet and configure:
VC++ Directories -> Include Directories: add .\inc and .\inc\opencv2
VC++ Directories -> Library Directories: add .\lib
Linker -> Input -> Additional Dependencies: add:
kinect20.lib
opencv_world450d.lib
opencv_world450.lib
pthreadVC2.lib
ws2_32.lib
Or, equivalently, write the following pragmas in the code:
#pragma comment(lib, "kinect20.lib")
#pragma comment(lib, "opencv_world450d.lib")
#pragma comment(lib, "opencv_world450.lib")
#pragma comment(lib, "pthreadVC2.lib")
#pragma comment(lib, "ws2_32.lib")
The copy steps above exist mainly so the project can be compiled easily on other machines.
Finally, copy the required runtime DLLs (x64 versions) into .\x64\Debug, next to the executable, so the program does not complain about missing .dll files when it is run on another computer:
OpenCV
pthreads (the first build may fail; the fix is to edit pthread.h: right below
#if !defined( PTHREAD_H )
#define PTHREAD_H
add
#define HAVE_STRUCT_TIMESPEC
An alternative that avoids editing the header is sketched right after this list.)
Kinect 2.0 SDK for Windows
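A minimal sketch of that alternative (assuming the pthreads-win32 Pre-built.2 headers): define HAVE_STRUCT_TIMESPEC before pthread.h is first included, either in code or under C/C++ -> Preprocessor -> Preprocessor Definitions, so the header does not redeclare struct timespec, which newer VC++ runtimes already provide:
// define before the include instead of patching pthread.h
#define HAVE_STRUCT_TIMESPEC
#include <pthread.h>
The complete PC-side program follows.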
#define _WINSOCK_DEPRECATED_NO_WARNINGS // allow inet_addr on newer Windows SDKs
#include <WinSock2.h>   // sockets; must come before Windows.h
#pragma comment(lib,"ws2_32.lib")
#include <Windows.h>    // Sleep, ShellExecute, VK_ESCAPE
#include <Kinect.h>     // Kinect v2 SDK
#include <opencv2/opencv.hpp> // Mat, imshow, line, circle
#include <pthread.h>    // pthreads-win32
#include <iostream>
#include <string>
using namespace std;
using namespace cv;
int g_number = 0;
#define TURN_ERR 100
int ss(string x);
void initialization();
void draw(Mat& img, Joint& r_1, Joint& r_2, ICoordinateMapper* myMapper);
SOCKET s_server;
HandState leftHandState = HandState_Unknown; // hand open/closed/lasso states
HandState rightHandState = HandState_Unknown;
Point lefthand, righthand;
boolean startmark = FALSE;
boolean quitmark = FALSE;
void* counter1(void* args) //socket client thread: translates hand states into motion commands
{
int send_len = 0;
SOCKADDR_IN server_addr;
initialization();
server_addr.sin_family = AF_INET;
server_addr.sin_addr.S_un.S_addr = inet_addr("192.168.1.1");
server_addr.sin_port = htons(2222);
s_server = socket(AF_INET, SOCK_STREAM, 0);
if (connect(s_server, (SOCKADDR*)&server_addr, sizeof(SOCKADDR)) == SOCKET_ERROR) {
cout << "Failed to connect to the server!" << endl;
//return 0;
WSACleanup();
}
else {
cout << "Connected to the server!" << endl;
}
while (!startmark); // spin until the Kinect thread has started
Sleep(100);
while (!quitmark)
{
int right_y_prev = righthand.y;
int right_x_prev = righthand.x;
Sleep(10);
if (leftHandState == HandState_Closed) // motor ctrl
{
if (rightHandState == HandState_Closed) //turn and go
{
if (abs(lefthand.y - righthand.y) <= TURN_ERR) {//hands at roughly the same height: go forward
ss("030000");
}
else if (lefthand.y - righthand.y < -TURN_ERR) {//left hand higher: turn right
ss("020000");
}
else if (lefthand.y - righthand.y > TURN_ERR) {//right hand higher: turn left
ss("010000");
}
}
else if (rightHandState == HandState_Open) // turn and back
{
if (abs(lefthand.y - righthand.y) <= TURN_ERR) {//hands at roughly the same height: go backward
ss("040000");
}
else if (lefthand.y - righthand.y < -TURN_ERR) {//left hand higher: turn right
ss("020000");
}
else if (lefthand.y - righthand.y > TURN_ERR) {//right hand higher: turn left
ss("010000");
}
}
else ss("000000");
}
else if (leftHandState == HandState_Lasso) // arm
{
//ss("000000");
if (rightHandState == HandState_Closed)
ss("140000");
else if(rightHandState == HandState_Open)
ss("240000");
/*4*/
Sleep(10);
if (rightHandState == HandState_Lasso) // turn 0x03
{
int err_y = (lefthand.y - righthand.y) / 2;
if (err_y < 0) {//left hand higher: turn right
ss(to_string(230000 + abs(err_y)));
}
else if (err_y > 0) {//right hand higher: turn left
ss(to_string(130000 + abs(err_y)));
}
}
else if (rightHandState == HandState_Open || rightHandState == HandState_Closed) // up down 1 2
{
int err_ry = righthand.y - right_y_prev;
int err_rx = righthand.x - right_x_prev;
err_ry = (abs(err_ry) < 12) ? abs(err_ry) : 12; //limit
err_ry = (err_ry > 1) ? err_ry : 0;
err_ry = int(err_ry / 2);
if (righthand.y - right_y_prev > 1) // up
{
ss(to_string(210000 + err_ry));
printf("21:%d\n",err_ry);
}
else if (righthand.y - right_y_prev < -1) //down
{
ss(to_string(110000 + err_ry));
printf("11:%d\n", err_ry);
}
if (righthand.x - right_x_prev > 2) // right
{
ss("020000");
}
else if (righthand.x - right_x_prev < -2) //left
{
ss("010000");
}
}
}
else if (leftHandState == HandState_Open)
{
ss("000000");
}
else
{
}
}
ss("000000");
Sleep(10);
ss("qq");
Sleep(100);
closesocket(s_server);
WSACleanup();
return 0;
}
void* counter2(void* args) { // Kinect thread: reads body frames, draws the skeleton, updates the hand states
startmark = TRUE;
IKinectSensor* mySensor = nullptr;
GetDefaultKinectSensor(&mySensor);
mySensor->Open();
IColorFrameSource* myColorSource = nullptr;
mySensor->get_ColorFrameSource(&myColorSource);
IColorFrameReader* myColorReader = nullptr;
myColorSource->OpenReader(&myColorReader);
int colorHeight = 0, colorWidth = 0;
IFrameDescription* myDescription = nullptr;
myColorSource->get_FrameDescription(&myDescription);
myDescription->get_Height(&colorHeight);
myDescription->get_Width(&colorWidth);
IColorFrame* myColorFrame = nullptr;
Mat original(colorHeight, colorWidth, CV_8UC4, Scalar(0,0,0,0));
cout << colorHeight << ";" << colorWidth << endl;
//********************** ColorFrame read preparation ends here **************************
IBodyFrameSource* myBodySource = nullptr;
mySensor->get_BodyFrameSource(&myBodySource);
IBodyFrameReader* myBodyReader = nullptr;
myBodySource->OpenReader(&myBodyReader);
int myBodyCount = 0;
myBodySource->get_BodyCount(&myBodyCount);
IBodyFrame* myBodyFrame = nullptr;
ICoordinateMapper* myMapper = nullptr;
mySensor->get_CoordinateMapper(&myMapper);
ShellExecute(NULL, L"open", L"http://192.168.1.1:8080/javascript_simple.html",NULL, NULL, SW_SHOWNORMAL);
//********************** BodyFrame and coordinate-mapper preparation ends here ***********************
while (1)
{
//uncomment the next two lines if the live color image is needed
//while (myColorReader->AcquireLatestFrame(&myColorFrame) != S_OK);
//myColorFrame->CopyConvertedFrameDataToArray(colorHeight * colorWidth * 4, original.data, ColorImageFormat_Bgra);
Mat copy = original.clone(); //canvas to draw on (the color frame if enabled, otherwise blank)
while (myBodyReader->AcquireLatestFrame(&myBodyFrame) != S_OK); //wait for a body frame
IBody** myBodyArr = new IBody * [myBodyCount]; //array that will receive the body data
for (int i = 0; i < myBodyCount; i++)
myBodyArr[i] = nullptr;
if (myBodyFrame->GetAndRefreshBodyData(myBodyCount, myBodyArr) == S_OK) //copy the body data into the array
for (int i = 0; i < myBodyCount; i++)
{
BOOLEAN result = false;
if (myBodyArr[i]->get_IsTracked(&result) == S_OK && result) //only proceed if this body is actually tracked
{
Joint myJointArr[JointType_Count];
myBodyArr[i]->get_HandLeftState(&leftHandState);
myBodyArr[i]->get_HandRightState(&rightHandState);
if (myBodyArr[i]->GetJoints(JointType_Count, myJointArr) == S_OK) //if tracked, copy the joints into the array and draw them
{
draw(copy, myJointArr[JointType_Head], myJointArr[JointType_Neck], myMapper);
draw(copy, myJointArr[JointType_Neck], myJointArr[JointType_SpineShoulder], myMapper);
draw(copy, myJointArr[JointType_SpineShoulder], myJointArr[JointType_ShoulderLeft], myMapper);
draw(copy, myJointArr[JointType_SpineShoulder], myJointArr[JointType_SpineMid], myMapper);
draw(copy, myJointArr[JointType_SpineShoulder], myJointArr[JointType_ShoulderRight], myMapper);
draw(copy, myJointArr[JointType_ShoulderLeft], myJointArr[JointType_ElbowLeft], myMapper);
draw(copy, myJointArr[JointType_SpineMid], myJointArr[JointType_SpineBase], myMapper);
draw(copy, myJointArr[JointType_ShoulderRight], myJointArr[JointType_ElbowRight], myMapper);
draw(copy, myJointArr[JointType_ElbowLeft], myJointArr[JointType_WristLeft], myMapper);
draw(copy, myJointArr[JointType_SpineBase], myJointArr[JointType_HipLeft], myMapper);
draw(copy, myJointArr[JointType_SpineBase], myJointArr[JointType_HipRight], myMapper);
draw(copy, myJointArr[JointType_ElbowRight], myJointArr[JointType_WristRight], myMapper);
draw(copy, myJointArr[JointType_WristLeft], myJointArr[JointType_ThumbLeft], myMapper);
draw(copy, myJointArr[JointType_WristLeft], myJointArr[JointType_HandLeft], myMapper);
draw(copy, myJointArr[JointType_HipLeft], myJointArr[JointType_KneeLeft], myMapper);
draw(copy, myJointArr[JointType_HipRight], myJointArr[JointType_KneeRight], myMapper);
draw(copy, myJointArr[JointType_WristRight], myJointArr[JointType_ThumbRight], myMapper);
draw(copy, myJointArr[JointType_WristRight], myJointArr[JointType_HandRight], myMapper);
draw(copy, myJointArr[JointType_HandLeft], myJointArr[JointType_HandTipLeft], myMapper);
draw(copy, myJointArr[JointType_KneeLeft], myJointArr[JointType_FootLeft], myMapper);
draw(copy, myJointArr[JointType_KneeRight], myJointArr[JointType_FootRight], myMapper);
draw(copy, myJointArr[JointType_HandRight], myJointArr[JointType_HandTipRight], myMapper);
ColorSpacePoint t_point;
myMapper->MapCameraPointToColorSpace(myJointArr[JointType_HandLeft].Position, &t_point);
lefthand.x = t_point.X;
lefthand.y = t_point.Y;
myMapper->MapCameraPointToColorSpace(myJointArr[JointType_HandRight].Position, &t_point);
righthand.x = t_point.X;
righthand.y = t_point.Y;
// cout << righthand.x << "," << righthand.y << endl; // coordinate check: the origin is the top-left corner of the video; x grows to the right, y grows downward
int linethickness = 4;
int circleradius = 50;
if (leftHandState == HandState_Open) {
circle(copy, lefthand, circleradius, Vec3b(0, 255, 0), linethickness);//green = open
}
else if (leftHandState == HandState_Closed) {
circle(copy, lefthand, circleradius, Vec3b(0, 0, 255), linethickness);// red = closed
}
else if (leftHandState == HandState_Lasso) {
circle(copy, lefthand, circleradius, Vec3b(255, 0, 0), linethickness);// blue = lasso
}
if (rightHandState == HandState_Open)
circle(copy, righthand, circleradius, Vec3b(0, 255, 0), linethickness);//green = open
else if (rightHandState == HandState_Closed)
circle(copy, righthand, circleradius, Vec3b(0, 0, 255), linethickness);// red = closed
else if (rightHandState == HandState_Lasso)
circle(copy, righthand, circleradius, Vec3b(255, 0, 0), linethickness);// blue = lasso
}
}
}
delete[]myBodyArr;
myBodyFrame->Release();
//myColorFrame->Release();
namedWindow("TEST",0);
cv::resizeWindow("TEST",1024,576);
cv::imshow("TEST", copy);
if (waitKey(30) == VK_ESCAPE)
{
quitmark = TRUE;
break;
}
}
myMapper->Release();
myDescription->Release();
myColorReader->Release();
myColorSource->Release();
myBodyReader->Release();
myBodySource->Release();
mySensor->Close();
mySensor->Release();
return 0;
}
int main() {
pthread_t t1;
pthread_t t2;
pthread_create(&t1, NULL, counter1, NULL);
pthread_create(&t2, NULL, counter2, NULL);
pthread_join(t1, NULL);
pthread_join(t2, NULL);
return 0;
}
void draw(Mat& img, Joint& r_1, Joint& r_2, ICoordinateMapper* myMapper)
{
//use the two joints as the endpoints of a line segment, filtering on tracking state
if (r_1.TrackingState == TrackingState_Tracked && r_2.TrackingState == TrackingState_Tracked)
{
ColorSpacePoint t_point; //the joints' camera-space positions must be converted into color-space points
Point p_1, p_2;
myMapper->MapCameraPointToColorSpace(r_1.Position, &t_point);
p_1.x = t_point.X;
p_1.y = t_point.Y;
myMapper->MapCameraPointToColorSpace(r_2.Position, &t_point);
p_2.x = t_point.X;
p_2.y = t_point.Y;
line(img, p_1, p_2, Vec3b(0, 255, 0), 2);
circle(img, p_1, 4, Vec3b(0, 255, 0), -1);
circle(img, p_2, 4, Vec3b(0, 255, 0), -1);
}
}
void initialization() {
//initialize the Winsock library
WORD w_req = MAKEWORD(2, 2);//requested version 2.2
WSADATA wsadata;
int err;
err = WSAStartup(w_req, &wsadata);
if (err != 0) {
cout << "Failed to initialize Winsock!" << endl;
}
else {
cout << "Winsock initialized." << endl;
}
//check the negotiated version
if (LOBYTE(wsadata.wVersion) != 2 || HIBYTE(wsadata.wVersion) != 2) {
cout << "Winsock version mismatch!" << endl;
WSACleanup();
}
else {
cout << "Winsock version OK." << endl;
}
//(the server address itself is filled in by counter1)
}
int ss(string x) { // send one command string to the server; returns the number of bytes sent
int y;
y = send(s_server, x.c_str(), strlen(x.c_str()), 0);
return y;
}
XiaoR GEEK (小R科技) Raspberry Pi car
The PC and the car communicate over a socket; the command framing is summarized in the sketch below.
At run time, start the lower-level (server-side) script on the car first; the script is listed after the sketch.
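As can be read off counter1 above and the server loop below, every command is a six-character ASCII string: the first two characters are an opcode ("00" stop, "03" go, "04" back, "01"/"02" spin left/right, "1x"/"2x" arm and gripper servo commands) and the last four are a zero-padded magnitude used by the servo commands; the literal "qq" ends the session. A hypothetical helper (makeCommand is not part of the original code) that builds such strings on the PC side:
#include <cstdio>
#include <string>
// Pack a two-digit opcode and a value in [0, 9999] into the six-character
// command that the server splits as msg[0:2] and msg[2:6].
std::string makeCommand(int opcode, int value = 0) {
    char buf[8];
    std::snprintf(buf, sizeof(buf), "%02d%04d", opcode, value);
    return std::string(buf);
}
// e.g. ss(makeCommand(3));      // "030000": both motors forward
//      ss(makeCommand(13, 12)); // "130012": servo command with magnitude 12
//      ss(makeCommand(0));      // "000000": stop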
# encoding: utf-8
import chardet
from motol_motion import *
import socket
import time
import os
print("Sever Started.")
#套接字接口
mySocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#设置IP和端口
host = '192.168.1.1'
port = 2222
#bind绑定该端口
mySocket.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
mySocket.bind((host, port))
mySocket.listen(10)
motion_initial()
angle=[0,130,175,90,100,0,0,90,0] ## init
XRservo.XiaoRGEEK_ReSetServo() ##reset angle
os.system("cd ~/work/mjpg-streamer-experimental-xr")
try:
os.system("./mjpg_streamer -i \"./input_raspicam.so\" -o \"./output_http.so -w ./www\"")
except:
print("video was opened or failed")
while True:
    # wait for a client to connect
    print("Wait For Connect....")
    client, address = mySocket.accept()
    print("Connected.")
    print("IP is %s" % address[0])
    print("port is %d\n" % address[1])
    while True:
        # read one message
        msg = client.recv(1024)
        stop()
        print(msg.decode('EUC-JP'))
        # decode the received data
        ##print(msg.encode("utf-8"))
        msg1 = msg[0:2]
        ##print(chardet.detect(msg))
        try:  ## first two bytes: opcode, next four bytes: magnitude
            msg2 = int(msg[2:6])
        except:
            msg2 = 0
        #print("message received")
        if msg == b"hello":
            print("im server")
        elif msg == b"1234":
            print("re1234")
        elif msg1 == b"00":  ## 00: stop
            right_motol(0)
            left_motol(0)
        elif msg1 == b"01":  ## 01: left
            left_motol(-1)
            right_motol(1)
            time.sleep(0.02)
        elif msg1 == b"02":  ## 02: right
            right_motol(-1)
            left_motol(1)
            time.sleep(0.02)
        elif msg1 == b"03":  ## 03: go
            right_motol(1)
            left_motol(1)
            time.sleep(0.02)
            angle[7] = 180
            angle[8] = 25
            set_angle(0x07, angle[7])
            set_angle(0x08, angle[8])
        elif msg1 == b"04":  ## 04: back
            right_motol(-1)
            left_motol(-1)
            time.sleep(0.02)
            angle[7] = 0
            angle[8] = 25
            set_angle(0x07, angle[7])
            set_angle(0x08, angle[8])
        elif msg1 == b"10":
            reset_angle()
        elif msg1 == b"11":  ## up (inc) servos 1 & 2; servo 1: up=130 low=45, servo 2: up=175 low=135
            if angle[2] == 175:  ## move servo 1
                if angle[1] + msg2 < 130:
                    angle[1] = angle[1] + msg2
                else:
                    angle[1] = 130
                angle[8] = 10 + 40 * (angle[1] - 45) / 85
            else:
                if angle[2] + msg2 < 175:
                    angle[2] = angle[2] + msg2
                else:
                    angle[2] = 175
                angle[8] = (angle[2] - 135) / 4
            angle[7] = 180
            set_angle(0x08, angle[8])
            set_angle(0x07, angle[7])
            set_angle(0x01, angle[1])
            set_angle(0x02, angle[2])
        elif msg1 == b"21":  ## down (dec) servos 1 & 2
            if angle[1] == 45:  ## move servo 2
                if angle[2] - msg2 > 135:
                    angle[2] = angle[2] - msg2
                else:
                    angle[2] = 135
                angle[8] = (angle[2] - 135) / 4
            else:
                if angle[1] - msg2 > 45:
                    angle[1] = angle[1] - msg2
                else:
                    angle[1] = 45
                angle[8] = 10 + 40 * (angle[1] - 45) / 85
            angle[7] = 180
            set_angle(0x08, angle[8])
            set_angle(0x07, angle[7])
            set_angle(0x01, angle[1])
            set_angle(0x02, angle[2])
        elif msg1 == b"13":  ## left (dec) servo 3
            if (90 - msg2) >= 0:
                angle[3] = 90 - msg2
            else:
                angle[3] = 0
            set_angle(0x03, angle[3])
        elif msg1 == b"23":  ## right (inc) servo 3
            if (90 + msg2) <= 180:
                angle[3] = 90 + msg2
            else:
                angle[3] = 180
            set_angle(0x03, angle[3])
        elif msg1 == b"14":  ## close gripper (servo 4 -> 150, range 100~150)
            angle[4] = 150
            set_angle(0x04, angle[4])
        elif msg1 == b"24":  ## open gripper (servo 4 -> 100)
            angle[4] = 100
            set_angle(0x04, angle[4])
        elif msg == b"qq":  ## client is done: shut everything down
            client.close()
            mySocket.close()
            print("end\n")
            exit()
# motol_motion.py
import RPi.GPIO as GPIO
from smbus import SMBus

XRservo = SMBus(1)

def set_angle(index, angle):
    # drive servo `index` to `angle` through the XiaoR GEEK servo controller
    XRservo.XiaoRGEEK_SetServo(index, angle)

GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)

# motor-driver pins (BCM numbering)
ENA = 13
ENB = 20
IN1 = 19
IN2 = 16
IN3 = 21
IN4 = 26

def motion_initial():
    GPIO.setup(ENA, GPIO.OUT, initial=GPIO.LOW)
    GPIO.setup(ENB, GPIO.OUT, initial=GPIO.LOW)
    GPIO.setup(IN1, GPIO.OUT, initial=GPIO.LOW)
    GPIO.setup(IN2, GPIO.OUT, initial=GPIO.LOW)
    GPIO.setup(IN3, GPIO.OUT, initial=GPIO.LOW)
    GPIO.setup(IN4, GPIO.OUT, initial=GPIO.LOW)

motion_initial()

def gogo():
    ##print("all go")
    GPIO.output(ENA, True)
    GPIO.output(IN1, True)
    GPIO.output(IN2, False)
    GPIO.output(ENB, True)
    GPIO.output(IN3, True)
    GPIO.output(IN4, False)

def back():
    ##print("all back")
    GPIO.output(ENA, True)
    GPIO.output(IN1, False)
    GPIO.output(IN2, True)
    GPIO.output(ENB, True)
    GPIO.output(IN3, False)
    GPIO.output(IN4, True)

def stop():
    ##print("all stop")
    GPIO.output(ENA, False)
    GPIO.output(IN1, False)
    GPIO.output(IN2, False)
    GPIO.output(ENB, False)
    GPIO.output(IN3, False)
    GPIO.output(IN4, False)

def left_motol(x):  ## 0: stop, 1: forward, -1: back
    GPIO.output(ENA, x != 0)  # enable the left motor whenever x is nonzero
    if x == 0:
        GPIO.output(IN1, 0)
        GPIO.output(IN2, 0)
        ##print("left stop")
    elif x == 1:
        GPIO.output(IN1, 1)
        GPIO.output(IN2, 0)
        ##print("left go")
    else:
        GPIO.output(IN1, 0)
        GPIO.output(IN2, 1)
        ##print("left back")

def right_motol(x):  ## 0: stop, 1: forward, -1: back
    GPIO.output(ENB, x != 0)  # enable the right motor whenever x is nonzero
    if x == 0:
        GPIO.output(IN3, 0)
        GPIO.output(IN4, 0)
        ##print("right stop")
    elif x == 1:
        GPIO.output(IN3, 1)
        GPIO.output(IN4, 0)
        ##print("right go")
    else:
        GPIO.output(IN3, 0)
        GPIO.output(IN4, 1)
        ##print("right back")
(to be continued ...)