I am working on a project where I want to display live video from an external camera in a Flutter application. I'm using websockets to try to achieve this.

This is the code of the websocket server made in python

import websockets
import asyncio

import mediapipe as mp
import cv2, struct, pickle, base64

# MediaPipe face-detection solution and drawing helpers.
mp_face_detection = mp.solutions.face_detection
mp_draw = mp.solutions.drawing_utils

# Shared detector instance; detections below 70% confidence are discarded.
face_detection = mp_face_detection.FaceDetection(min_detection_confidence=0.7)

# TCP port the websocket server listens on.
port = 5000

def draw_bbox(res, frame):
    """Draw a bounding box and confidence label for each detected face.

    Args:
        res: MediaPipe FaceDetection result; ``res.detections`` is assumed
            non-empty (the caller checks this before calling).
        frame: BGR image (numpy array), annotated in place.
    """
    # mp_draw.draw_detection(frame, det)  #? Direct method for drawing the
    # bounding box and feature points.

    # Frame size is loop-invariant; channel count is unused.
    ih, iw, _ = frame.shape

    for det in res.detections:
        # Relative coordinates (0..1) -> absolute pixel coordinates.
        coord = det.location_data.relative_bounding_box
        bbox = (int(coord.xmin * iw), int(coord.ymin * ih),
                int(coord.width * iw), int(coord.height * ih))

        # This cv2.rectangle overload accepts an (x, y, w, h) rect tuple.
        cv2.rectangle(frame, bbox, (255, 0, 255), 2)
        cv2.putText(
            frame,
            f'{int(det.score[0]*100)}%',  # confidence as a percentage
            (bbox[0], bbox[1] - 20),      # label just above the box
            cv2.FONT_HERSHEY_PLAIN,
            2,
            (0, 255, 0),
            2
        )

# Startup banner; printed at import time, before the server actually starts.
print("Started server on port : ", port)

async def transmit(websocket, path):
    """Stream annotated webcam frames to a connected websocket client.

    Each message is the pickled frame prefixed with its length packed as an
    unsigned long long ("Q"), matching the framing the Python clients expect.
    """
    print("Client Connected !")
    cap = cv2.VideoCapture(0)
    try:
        while cap.isOpened():
            # cap.read() returns (success_flag, frame); the flag must be
            # checked or a failed grab crashes cvtColor with frame=None.
            ok, frame = cap.read()
            if not ok:
                break

            # MediaPipe expects RGB; OpenCV captures BGR.
            rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            res = face_detection.process(rgb)

            if res.detections:
                draw_bbox(res, frame)

            payload = pickle.dumps(frame)

            # 8-byte length header followed by the pickled frame.
            msg = struct.pack("Q", len(payload)) + payload
            await websocket.send(msg)

            # cv2.imshow("Transimission", frame)
            # if cv2.waitKey(1) & 0xFF == ord('q'):
            #     break

    except websockets.exceptions.ConnectionClosed:
        # ConnectionClosed lives in websockets.exceptions, not
        # websockets.connection.
        print("Client Disconnected !")
    finally:
        # Release the camera on every exit path, not only on disconnect.
        cap.release()
# Register the handler and run the event loop forever.
# NOTE(review): asyncio.get_event_loop() is deprecated for this usage in
# Python 3.10+; asyncio.run() with websockets.serve as an async context
# manager is the modern pattern.
start_server = websockets.serve(transmit, port=port)

asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()

# The original file ended with a module-level `cap.release()` here; it was
# unreachable (run_forever blocks until interrupted) and `cap` is local to
# transmit(), so it would only ever have raised NameError. Removed.

This works fine and sends the image frames, encoded as bytes, to other Python clients.

This is the code for Flutter where I want to read and display these frames live

class _MainPageState extends State<MainPage> {
  // Websocket endpoint of the Python server (replace <network_ipv4>).
  static const String url = "ws://<network_ipv4>:5000";
  // NOTE(review): connecting in the field initializer opens a connection as
  // soon as the State is created, before any button is pressed.
  WebSocketChannel _channel = WebSocketChannel.connect(Uri.parse(url));

  // Re-create the channel; the previous one is dropped without being closed.
  void _connectToWebsocket() {
    _channel = WebSocketChannel.connect(Uri.parse(url));
  }

  // Close the outgoing sink of the current channel.
  void _disconnectToWebsocket() {
    _channel.sink.close();
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: const Text("Live Video"),
        ),
        body: Padding(
          padding: const EdgeInsets.all(20.0),
          child: Column(
            children: [
              Row(
                children: [
                  ElevatedButton(
                      onPressed: _connectToWebsocket,
                      child: const Text("Force Connection")),
                  const SizedBox(
                    width: 120.0,
                  ),
                  ElevatedButton(
                      onPressed: _disconnectToWebsocket,
                      child: const Text("Disconnect")),
                ],
              ),
              StreamBuilder(
                stream: _channel.stream,
                builder: (context, snapshot) {
                  // NOTE(review): the server sends binary messages (struct
                  // length header + pickled frame), but this decodes each
                  // message as a base64 string — the two formats do not
                  // match, so Image.memory can never render a frame.
                  return snapshot.hasData
                      ? Image.memory(
                          base64Decode(snapshot.data.toString()),
                        )
                      : const Center(
                          child: Text("No Data"),
                        );
                },
              )
            ],
          ),
        ),
      ),
    );
  }
}

Please help me out here


Solution 1: Mitrajeet Golsangi

I've solved the issue. The problem was that after conversion to a base64 string, Python's str() conversion wrapped the encoded bytes in b'...'. Thus the Dart client was not able to decode it. This is the working server code:

import websockets
import asyncio

import cv2, base64

# The original snippet referenced `port` without ever defining it (NameError);
# define it so this server snippet runs standalone.
port = 5000

print("Started server on port : ", port)

async def transmit(websocket, path):
    """Stream webcam frames to a websocket client as base64-encoded JPEGs.

    Each frame is JPEG-compressed and sent as a plain base64 text message,
    which the Flutter client decodes with base64Decode().
    """
    print("Client Connected !")
    cap = cv2.VideoCapture(0)
    try:
        while cap.isOpened():
            # cap.read() returns (success_flag, frame); check the flag so a
            # failed grab stops cleanly instead of crashing on frame=None.
            ok, frame = cap.read()
            if not ok:
                break

            # JPEG-compress the frame; imencode returns (success, buffer).
            encoded = cv2.imencode('.jpg', frame)[1]

            # Decode the base64 bytes to str directly instead of calling
            # str() on the bytes object and slicing off the b'...' wrapper.
            data = base64.b64encode(encoded).decode('ascii')

            await websocket.send(data)

            cv2.imshow("Transimission", frame)

            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    except websockets.exceptions.ConnectionClosed:
        # ConnectionClosed is defined in websockets.exceptions, not
        # websockets.connection.
        print("Client Disconnected !")
    finally:
        cap.release()  # release the camera on every exit path
# Register the connection handler and serve forever on the configured port.
start_server = websockets.serve(transmit, port=port)

# NOTE(review): get_event_loop()/run_forever() works here but is deprecated
# in newer Python; asyncio.run() is the modern entry point.
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()

and here is the Flutter client code, if anyone wants to build a similar application:

import 'dart:convert';
import 'dart:typed_data';

import 'package:flutter/material.dart';
import 'package:patrolling_robot/src/styles/styles.dart';
import 'package:web_socket_channel/io.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

/// Root page widget; all mutable state lives in [_MainPageState].
class MainPage extends StatefulWidget {
  const MainPage({Key? key}) : super(key: key);

  @override
  State<MainPage> createState() {
    return _MainPageState();
  }
}

class _MainPageState extends State<MainPage> {
  /// Websocket endpoint of the Python streaming server
  /// (replace <network_ipv4> with the server's LAN address).
  static const String url = "ws://<network_ipv4>:5000";

  WebSocketChannel? _channel;
  bool _isConnected = false;

  /// Open the websocket connection and show the stream.
  void connect() {
    _channel = IOWebSocketChannel.connect(Uri.parse(url));
    setState(() {
      _isConnected = true;
    });
  }

  /// Close the websocket connection and hide the stream.
  void disconnect() {
    _channel!.sink.close();
    setState(() {
      _isConnected = false;
    });
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      darkTheme: ThemeData(brightness: Brightness.dark),
      themeMode: ThemeMode.dark,
      home: Scaffold(
        appBar: AppBar(
          title: const Text("Live Video"),
        ),
        body: Padding(
          padding: const EdgeInsets.all(20.0),
          child: Center(
            child: Column(
              children: [
                Row(
                  mainAxisAlignment: MainAxisAlignment.spaceBetween,
                  children: [
                    ElevatedButton(
                      onPressed: connect,
                      style: buttonStyle,
                      child: const Text("Connect"),
                    ),
                    ElevatedButton(
                      onPressed: disconnect,
                      style: buttonStyle,
                      child: const Text("Disconnect"),
                    ),
                  ],
                ),
                const SizedBox(
                  height: 50.0,
                ),
                _isConnected
                    ? StreamBuilder(
                        stream: _channel!.stream,
                        builder: (context, snapshot) {
                          // Check for a closed stream BEFORE hasData:
                          // behind the hasData guard the done-check was
                          // unreachable, leaving the spinner up forever
                          // when the connection ended without data.
                          if (snapshot.connectionState ==
                              ConnectionState.done) {
                            return const Center(
                              child: Text("Connection Closed !"),
                            );
                          }
                          if (!snapshot.hasData) {
                            return const CircularProgressIndicator();
                          }

                          // base64Decode already returns a Uint8List, so
                          // the extra Uint8List.fromList copy is dropped.
                          return Image.memory(
                            base64Decode(snapshot.data.toString()),
                            // Keep the previous frame visible while the
                            // next one decodes to avoid flicker.
                            gaplessPlayback: true,
                          );
                        },
                      )
                    : const Text("Initiate Connection")
              ],
            ),
          ),
        ),
      ),
    );
  }
}