可以尝试将KNN算法中的angle_tr权重提高,使其对分类结果的影响更大。可以通过调整KNeighborsClassifier函数中的weights参数实现,将其设置为'distance',表示距离越近的数据点对分类结果的影响越大。同时,可以将angle_tr的值乘以10,以增大其在距离计算中的影响(注意:除以10会缩小该特征上的距离差异,反而降低其权重)。具体代码如下:

# Train the KNN classifier on the eleven per-frame joint-angle features.
# weights='distance' makes nearer neighbours count more toward the vote.
feature_cols = [f"angle{k}" for k in range(1, 12)]
knn = KNeighborsClassifier(n_neighbors=2, weights='distance')
knn.fit(data[feature_cols], data['label'])

# Process every frame of the video file.
# NOTE(review): assumes `cap` (a cv2.VideoCapture), `mp_pose`, and `mp_drawing`
# are initialized earlier in the script — confirm against the full source.
with mp_pose.Pose(min_detection_confidence=0.3, min_tracking_confidence=0.3) as pose:
    while cap.isOpened():
        # Read one frame; stop at end of stream or on read failure.
        ret, frame = cap.read()
        if not ret:
            break

        # MediaPipe expects RGB input; OpenCV delivers BGR.
        image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)

        # Run pose-landmark detection on the frame.
        results = pose.process(image)

        # Only draw and extract landmarks when a body was detected.
        if results.pose_landmarks:
            # Draw the detected skeleton onto the original BGR frame.
            mp_drawing.draw_landmarks(
                frame, results.pose_landmarks, mp_pose.POSE_CONNECTIONS)

            # Key points of the right knee, right ankle, and right wrist.
            right_knee = results.pose_landmarks.landmark[mp_pose.PoseLandmark.RIGHT_KNEE]
            right_ankle = results.pose_landmarks.landmark[mp_pose.PoseLandmark.RIGHT_ANKLE]
            right_wrist = results.pose_landmarks.landmark[mp_pose.PoseLandmark.RIGHT_WRIST]

            # Key points of the left shoulder, left elbow, and left wrist.
            left_shoulder = results.pose_landmarks.landmark[mp_pose.PoseLandmark.LEFT_SHOULDER]
            left_elbow = results.pose_landmarks.landmark[mp_pose.PoseLandmark.LEFT_ELBOW]
            left_wrist = results.pose_landmarks.landmark[mp_pose.PoseLandmark.LEFT_WRIST]

            # Get the right shoulder, right elbow, and right ...
            # (source text is truncated here; the rest of the loop body is missing)
# --- Reconstructed from a scrape-mangled single line; original formatting lost ---
# NOTE(review): the source collapsed these imports and initializations onto one
# line ("import cv2import mediapipe as mp...") and cut the line off mid-statement.
# The typo "sklearnneighbors" is fixed to "sklearn.neighbors". The initial values
# of the accumulator variables and everything after "mp_drawi" are a best-effort
# guess based on how the rest of the script uses them — confirm against the
# original script.
import math

import cv2
import mediapipe as mp
import numpy as np
import pandas as pd
from sklearn.neighbors import KNeighborsClassifier

i = 0          # frame counter — presumably; verify
label = []     # presumably per-frame class labels — verify
poses = []     # presumably collected pose feature rows — verify
poses_a = []
poses_b = []

# Initialize MediaPipe's human-pose model and drawing helpers
# (the source line ends at "mp_drawi", i.e. mp_drawing; mp_pose is also
#  required by the frame loop below).
mp_drawing = mp.solutions.drawing_utils
mp_pose = mp.solutions.pose

原文地址: https://www.cveoy.top/t/topic/eEO6 著作权归作者所有。请勿转载和采集!

免费AI点我,无需注册和登录