在Python3中, socket只能传输bytes格式的信息,到达server端时候再还原回原来的类型。

网上很多用于传输图片的socket代码写的很简单,本文旨在给出一个简洁的demo,如有其余需要可自行扩展。

1. 对于numpy格式的图片:

  • 编码时:

    • 第一步: _, img_encode = cv2.imencode('.jpg', img_numpy)
    • 第二步: img_bytes = img_encode.tobytes()
  • 解码时:

    • 第一步: img_buffer_numpy = np.frombuffer(img_bytes, dtype=np.uint8)
    • 第二步: img_numpy = cv2.imdecode(img_buffer_numpy, 1)

2. 对于string格式的信息:

  • 编码时:

    • msg_bytes = msg_str.encode()
  • 解码时:

    • msg_str = msg_bytes.decode()

server.py

import socket
import cv2
import numpy as np
import os


class VideoServer:
    """TCP server that receives JPEG-encoded frames and saves them to ./save.

    NOTE(review): the wire protocol assumes one client send() maps to one
    server recv(), which TCP (a byte stream) does not guarantee — messages
    may be split or coalesced. A length-prefixed framing scheme would be
    more robust; it is not changed here to stay compatible with client.py.
    """

    def __init__(self):
        # Create a TCP socket listening on localhost:8002.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick restarts without "address already in use" errors.
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock.bind(('127.0.0.1', 8002))
        self.sock.listen(1)

    def Get(self):
        """Accept one client and save every received frame as a JPEG file."""
        conn, addr = self.sock.accept()
        print(addr, '已连贯...')
        os.makedirs('./save', exist_ok=True)
        try:
            while True:
                # Buffer must exceed the encoded image size, otherwise the
                # image arrives truncated and imdecode fails.
                img_data = conn.recv(4073800)
                if not img_data:
                    # Peer closed the connection: stop instead of looping
                    # forever on empty reads (bug in the original).
                    break
                img_name = conn.recv(1024)
                if not img_name:
                    break
                # bytes -> 1-D uint8 numpy array -> decoded BGR image.
                img_buffer_numpy = np.frombuffer(img_data, dtype=np.uint8)
                frame = cv2.imdecode(img_buffer_numpy, 1)
                if frame is None:
                    # Corrupted/partial JPEG; skip rather than crash imwrite.
                    continue
                name = img_name.decode()
                cv2.imwrite('./save/' + name, frame)
                print('已胜利接管', name)
        finally:
            # Release both sockets even if an exception escapes the loop
            # (the original leaked them on error).
            conn.close()
            self.sock.close()


if __name__ == '__main__':
    vs = VideoServer()
    vs.Get()

client.py

import socket
import cv2
import numpy
import time


class VideoClient:
    """TCP client that streams one JPEG frame from a video file every ~4s."""

    def __init__(self):
        # Connect to the server (must be started first).
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect(('127.0.0.1', 8002))

    def Send(self):
        """Read the video and send one frame roughly every 4 seconds.

        Each frame is sent as two messages: the JPEG bytes, then the
        file-name bytes (e.g. b'100.jpg'). See server.py for the matching
        receive side.
        """
        cap = cv2.VideoCapture('D:/test/ccc/mp4/1.mp4')
        fps = cap.get(cv2.CAP_PROP_FPS)  # frames per second of the video
        # fps is a float (e.g. 29.97); `k % (fps*4)` would then never be
        # exactly 0 and no frame would ever be sent (bug in the original).
        # Use a rounded integer interval instead; behavior is unchanged for
        # integral frame rates.
        interval = max(1, int(round(fps * 4)))
        k = 0
        while cap.isOpened():
            success, frame = cap.read()
            if not success:
                # End of video: the original had no break here and spun
                # forever once read() started failing.
                break
            k += 1
            if k % interval == 0:  # one frame every ~4 seconds of video
                # numpy image -> JPEG bytes.
                _, img_encode = cv2.imencode('.jpg', frame)
                img_data = img_encode.tobytes()
                # str -> bytes for the file name.
                img_name = (str(k) + '.jpg').encode()
                # Send the two messages back to back.
                self.sock.send(img_data)
                self.sock.send(img_name)
                time.sleep(1)
                print('已胜利发送%3d.jpg,睡眠1秒' % k)
        cap.release()
        self.sock.close()


if __name__ == '__main__':
    vc = VideoClient()
    vc.Send()

先启动server.py,再启动client.py,效果如下: