
Python • Toggling between two camera streams Raspberry Pi 5

Hello, I've been trying for a few days to toggle between two video streams on a Raspberry Pi 5, without success. I found code online that streams two cameras at once to a browser. Link: https://github.com/chradev/pi-h264-to-browser-streamer/
I modified it to stream only one video and, when a toggle button is pressed, stop the first camera stream and start the second. So far it hasn't worked: when I run the code, only the first video stream starts (as intended), but when the toggle button is pressed the stream sort of pauses and the second video stream never starts. When I toggle again, the first video stream resumes in real time (not bad). I suspect the problem is in one of the three files I'm posting below: the Python code (the one I run), the HTML for the web page, and a JavaScript file (which runs the muxer, I assume; this is the file I'm most confused by).
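
First, just to show the switching pattern I'm after, here is a stripped-down, standalone sketch that only toggles which camera is recording and nothing else. The cam0.h264/cam1.h264 file outputs, the 1000x720 size and the 5-second timer are placeholders of mine for testing; they are not part of the project code that follows.

Code:

import time
from picamera2 import Picamera2
from picamera2.encoders import H264Encoder

# Open and configure both cameras once; only the recordings get toggled.
cams = [Picamera2(0), Picamera2(1)]
for cam in cams:
    cam.configure(cam.create_video_configuration(main={"size": (1000, 720)}))

active = 0
cams[active].start_recording(H264Encoder(repeat=True, framerate=30), f"cam{active}.h264")

def toggle():
    # Stop the camera that is currently recording, then start the other one.
    global active
    cams[active].stop_recording()
    active = 1 - active
    cams[active].start_recording(H264Encoder(repeat=True, framerate=30), f"cam{active}.h264")

# Flip between the two cameras a few times as a standalone test.
for _ in range(4):
    time.sleep(5)
    toggle()
cams[active].stop_recording()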

Python main code:

Code:

import io
import os
import socket
from string import Template
import tornado.ioloop
import tornado.web
import tornado.websocket
from picamera2 import Picamera2
from picamera2.encoders import H264Encoder
from picamera2.outputs import Output

# start configuration
serverPort     = 8000
frameRate      = 30
frameWidth     = 1000
frameHeight    = 720
frameOffsetX0  = 1020
frameOffsetY0  = 880
frameOffsetX1  = 1740
frameOffsetY1  = 430
enableView     = False
enableSecs0    = True
enableSecs1    = False
enableTexts0   = True
enableTexts1   = True
enableLines0   = False  # CHANGED
enableLines1   = False  # CHANGED
framehFlip0    = 0
framevFlip0    = 0
framePause0    = 0
framehFlip1    = 0
framevFlip1    = 0
framePause1    = 0

from subprocess import check_output
hostIPAddr = check_output(['hostname', '-I'], text=True).split()[0]

picam20 = Picamera2(0)
picam21 = Picamera2(1)

toggle_requested = False  # Global flag to signal toggling

full_camera_res = picam20.camera_properties['PixelArraySize']
frameOffsetX0m  = full_camera_res[0]
frameOffsetY0m  = full_camera_res[1]
full_camera_res = picam21.camera_properties['PixelArraySize']
frameOffsetX1m  = full_camera_res[0]
frameOffsetY1m  = full_camera_res[1]

from libcamera import Transform
picam20.configure(picam20.create_video_configuration(main={"size": (frameWidth, frameHeight)},
    transform=Transform(hflip=framehFlip0, vflip=framevFlip0)))
picam21.configure(picam21.create_video_configuration(main={"size": (frameWidth, frameHeight)},
    transform=Transform(hflip=framehFlip1, vflip=framevFlip1)))

import cv2
import time
from picamera2 import MappedArray

colour = (0, 255, 0)
origin = (0, 30)
font = cv2.FONT_HERSHEY_SIMPLEX
scale = 1
thickness = 2

def apply_timestamp0(request):
    timestamp = time.strftime("%Y-%m-%d %X") + " - Camera: 0"
    with MappedArray(request, "main") as m:
        if enableTexts0 is True:
            cv2.putText(m.array, timestamp, origin, font, scale, colour, thickness)
        # Lines disabled for Camera 0

def apply_timestamp1(request):
    timestamp = time.strftime("%Y-%m-%d %X") + " - Camera: 1"
    with MappedArray(request, "main") as m:
        if enableTexts1 is True:
            cv2.putText(m.array, timestamp, origin, font, scale, colour, thickness)
        # Lines disabled for Camera 1

picam20.pre_callback = apply_timestamp0
picam21.pre_callback = apply_timestamp1

from picamera2 import Preview
if enableView is True:
    picam20.start_preview(Preview.QTGL, x=10, y=40, width=500, height=500)
    picam21.start_preview(Preview.QTGL, x=1400, y=540, width=500, height=500)

class StreamingOutput(Output):
    stream = -1

    def __init__(self, stream):
        super().__init__()
        self.stream = stream
        self.loop = None
        self.buffer = io.BytesIO()
        super(StreamingOutput, self).__init__(stream)

    def setLoop(self, loop):
        self.loop = loop

    def outputframe(self, frame, keyframe=True, timestamp=None, packet=None, audio=None):
        self.buffer.write(frame)
        if self.loop is not None and camHandler.hasConnections(cam=self.stream):
            self.loop.add_callback(callback=camHandler.broadcast,
                cam=self.stream, message=self.buffer.getvalue())
        self.buffer.seek(0)
        self.buffer.truncate()

class camHandler(tornado.websocket.WebSocketHandler):
    camera = -1
    connsCam0 = []
    connsCam1 = []
    remoteIP = ""

    def open(self, camera):
        self.remoteIP = str(self.request.remote_ip)
        self.camera = int(camera)
        print("[%s] Starting a service: Camera %r (%s)" %
              (time.strftime("%Y-%m-%d %X"), self.camera, self.remoteIP))
        if self.camera == 0:
            self.connsCam0.append(self)
        else:
            self.connsCam1.append(self)

    def on_close(self):
        print("[%s] Stopping a service: Camera %r (%s)" %
              (time.strftime("%Y-%m-%d %X"), self.camera, self.remoteIP))
        if self.camera == 0:
            self.connsCam0.remove(self)
        else:
            self.connsCam1.remove(self)

    def on_message(self, message):
        pass

    @classmethod
    def hasConnections(cl, cam):
        if cam == 0 and len(cl.connsCam0) == 0:
            return False
        elif cam == 1 and len(cl.connsCam1) == 0:
            return False
        return True

    @classmethod
    async def broadcast(cl, cam, message):
        if cam == 0:
            conns = cl.connsCam0
        else:
            conns = cl.connsCam1
        for connection in conns:
            try:
                await connection.write_message(message, True)
            except tornado.websocket.WebSocketClosedError:
                pass
            except tornado.iostream.StreamClosedError:
                pass

    def check_origin(self, origin):
        return True

currentCamera = 0  # Global variable for active camera state

class toggleHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        print(f"[{time.strftime('%Y-%m-%d %X')}] WebSocket connection opened: {self.request.remote_ip}")
        # Send a test message to confirm frontend receives it
        self.write_message(json.dumps({"test": "Connection successful"}))

    def on_close(self):
        print(f"[{time.strftime('%Y-%m-%d %X')}] WebSocket connection closed: {self.request.remote_ip}")

    async def on_message(self, message):
        global currentCamera
        if message == "toggle":
            print(f"[{time.strftime('%Y-%m-%d %X')}] Toggle requested by client: {self.request.remote_ip}")
            # Toggle camera
            if currentCamera == 0:
                print(f"[{time.strftime('%Y-%m-%d %X')}] Switching to Camera 1")
                streamer0.stop()
                streamer1.start()
                currentCamera = 1
            else:
                print(f"[{time.strftime('%Y-%m-%d %X')}] Switching to Camera 0")
                streamer1.stop()
                streamer0.start()
                currentCamera = 0
            # Broadcast to clients
            for conn in camHandler.connsCam0 + camHandler.connsCam1:
                try:
                    message = json.dumps({"currentCamera": currentCamera})
                    conn.write_message(message)
                    print(f"Broadcasted: {message} to {conn.remoteIP}")
                except Exception as e:
                    print(f"Error broadcasting to {conn.remoteIP}: {e}")
                else:
                    print(f"Message successfully sent to {conn.remoteIP}")

    def check_origin(self, origin):
        return True

import json

def ptz_send_data():
    return json.dumps([
        { 'cam': 0,
          'f': {
              'hor': framehFlip0,
              'ver': framevFlip0,
              'pau': 0
          },
          'e': {
              'txt': enableTexts0,
              'lin': enableLines0,
              'def': False
          },
          'x': {
              'min': int(frameWidth / 2),
              'val': int(frameOffsetX0 + frameWidth / 2),
              'max': int(frameOffsetX0m - frameWidth / 2)
          },
          'y': {
              'min': int(frameHeight / 2),
              'val': int(frameOffsetY0 + frameHeight / 2),
              'max': int(frameOffsetY0m - frameHeight / 2)
          }
        },
        { 'cam': 1,
          'f': {
              'hor': framehFlip1,
              'ver': framevFlip1,
              'pau': 0
          },
          'e': {
              'txt': enableTexts1,
              'lin': enableLines1,
              'def': False
          },
          'x': {
              'min': int(frameWidth / 2),
              'val': int(frameOffsetX1 + frameWidth / 2),
              'max': int(frameOffsetX1m - frameWidth / 2)
          },
          'y': {
              'min': int(frameHeight / 2),
              'val': int(frameOffsetY1 + frameHeight / 2),
              'max': int(frameOffsetY1m - frameHeight / 2)
          }
        },
        { 'cam': 2,
          'x': {
              'min': -1500,
              'val': 0,
              'max': 1500
          },
          'y': {
              'min': -1500,
              'val': 0,
              'max': 1500
          },
          'z': {
              'min': 0.5,
              'val': 1.0,
              'max': 2.0
          }
        }
    ])

class Streamer():
    camera = -1
    running = False  # Keeps track of encoder state

    def __init__(self, camera):
        super().__init__()
        self.camera = camera
        print("[%s] Initializing Streamer for: Camera %r" %
              (time.strftime("%Y-%m-%d %X"), self.camera))
        self.loop = None
        self.output = StreamingOutput(stream=self.camera)
        self.encoder = H264Encoder(repeat=True, framerate=frameRate, qp=33)
        self.encoder.output = self.output

    def setLoop(self, loop):
        self.output.setLoop(loop)

    def isRunning(self):
        return self.running

    def start(self):
        if not self.running:  # Prevent starting if already running
            if self.camera == 0:
                picam20.start_recording(self.encoder, self.output)
            elif self.camera == 1:
                picam21.start_recording(self.encoder, self.output)
            self.running = True
            print("[%s] Started streaming for Camera %r" %
                  (time.strftime("%Y-%m-%d %X"), self.camera))
        else:
            print("[%s] Encoder for Camera %r is already running" %
                  (time.strftime("%Y-%m-%d %X"), self.camera))

    def stop(self):
        if self.running:  # Only stop if currently running
            if self.camera == 0:
                picam20.stop_recording()
            elif self.camera == 1:
                picam21.stop_recording()
            self.running = False
            print("[%s] Stopped streaming for Camera %r" %
                  (time.strftime("%Y-%m-%d %X"), self.camera))
        else:
            print("[%s] Encoder for Camera %r is not running" %
                  (time.strftime("%Y-%m-%d %X"), self.camera))

streamer0 = Streamer(camera=0)
streamer1 = Streamer(camera=1)

def set_ptz_data(data, skip):
    global enableTexts0
    global enableTexts1
    global enableLines0
    global enableLines1
    if skip:
        return
    enableTexts0 = data[0]['e']['txt']
    enableTexts1 = data[1]['e']['txt']
    enableLines0 = data[0]['e']['lin']
    enableLines1 = data[1]['e']['lin']
    global framehFlip0
    global framevFlip0
    global framePause0
    global framehFlip1
    global framevFlip1
    global framePause1
    changeFlip = not framehFlip0 == data[0]['f']['hor'] or\
                 not framevFlip0 == data[0]['f']['ver'] or\
                 not framePause0 == data[0]['f']['pau'] or\
                 not framehFlip1 == data[1]['f']['hor'] or\
                 not framevFlip1 == data[1]['f']['ver'] or\
                 not framePause1 == data[1]['f']['pau']
    framehFlip0 = data[0]['f']['hor']
    framevFlip0 = data[0]['f']['ver']
    framePause0 = data[0]['f']['pau']
    framehFlip1 = data[1]['f']['hor']
    framevFlip1 = data[1]['f']['ver']
    framePause1 = data[1]['f']['pau']
    global streamer0
    global streamer1
    if changeFlip:
        if streamer0.isRunning():
            streamer0.stop()
        if streamer1.isRunning():
            streamer1.stop()
        if framePause0 or framePause1:
            return
        picam20.configure(picam20.create_video_configuration(main={"size": (frameWidth, frameHeight)},
            transform=Transform(hflip=framehFlip0, vflip=framevFlip0)))
        picam21.configure(picam21.create_video_configuration(main={"size": (frameWidth, frameHeight)},
            transform=Transform(hflip=framehFlip1, vflip=framevFlip1)))
        streamer0.start()
        streamer1.start()
    if data[0]['e']['def'] or data[1]['e']['def']:
        global frameOffsetX0
        global frameOffsetY0
        global frameOffsetX1
        global frameOffsetY1
        frameOffsetX0 = data[0]['x']['val'] - data[0]['x']['min'] + data[2]['x']['val']
        frameOffsetY0 = data[0]['y']['val'] - data[0]['y']['min'] + data[2]['y']['val']
        frameOffsetX1 = data[1]['x']['val'] - data[1]['x']['min'] + data[2]['x']['val']
        frameOffsetY1 = data[1]['y']['val'] - data[1]['y']['min'] + data[2]['y']['val']
    scalerCrop = (
        data[0]['x']['val'] - data[0]['x']['min'] + data[2]['x']['val'] + int((frameWidth - frameWidth * data[2]['z']['val']) / 2),
        data[0]['y']['val'] - data[0]['y']['min'] + data[2]['y']['val'] + int((frameHeight - frameHeight * data[2]['z']['val']) / 2),
        int(frameWidth / data[2]['z']['val']),
        int(frameHeight / data[2]['z']['val']))
    picam20.set_controls({"ScalerCrop": scalerCrop})
    scalerCrop = (
        data[1]['x']['val'] - data[1]['x']['min'] + data[2]['x']['val'] + int((frameWidth - frameWidth * data[2]['z']['val']) / 2),
        data[1]['y']['val'] - data[1]['y']['min'] + data[2]['y']['val'] + int((frameHeight - frameHeight * data[2]['z']['val']) / 2),
        int(frameWidth / data[2]['z']['val']),
        int(frameHeight / data[2]['z']['val']))
    picam21.set_controls({"ScalerCrop": scalerCrop})

# WebSocketHandler for camera PTZ control
class ptzHandler(tornado.websocket.WebSocketHandler):
    connections = []
    remoteIP = ""

    def open(self):
        global enableTexts0
        global enableTexts1
        global enableLines0
        global enableLines1
        global framehFlip0
        global framevFlip0
        global framePause0
        global framehFlip1
        global framevFlip1
        global framePause1
        self.remoteIP = str(self.request.remote_ip)
        print("[%s] Starting a service: CamPTZ - (%s)" %
              (time.strftime("%Y-%m-%d %X"), self.remoteIP))
        self.connections.append(self)
        # Reset camera offset and size properties
        set_ptz_data(json.loads(getFile("data.json")), False)
        enableTexts0 = True
        enableTexts1 = True
        enableLines0 = True
        enableLines1 = True
#        framehFlip0    = 1
#        framevFlip0    = 1
#        framePause0    = 0
#        framehFlip1    = 1
#        framevFlip1    = 1
#        framePause1    = 0
        scalerCrop = (frameOffsetX0, frameOffsetY0, frameWidth, frameHeight)
        picam20.set_controls({"ScalerCrop": scalerCrop})
        scalerCrop = (frameOffsetX1, frameOffsetY1, frameWidth, frameHeight)
        picam21.set_controls({"ScalerCrop": scalerCrop})
        # Send initial data
        message = ptz_send_data()
        self.broadcast(message)

    def on_close(self):
        print("[%s] Stopping a service: CamPTZ - (%s)" %
              (time.strftime("%Y-%m-%d %X"), self.remoteIP))
        self.connections.remove(self)

    def on_message(self, message):
        data = json.loads(message)
        set_ptz_data(data, False)
        with open('data.json', 'w') as f:
            json.dump(data, f, ensure_ascii=False, indent=2)
        self.broadcast(message)

    @classmethod
    def hasConnections(cl):
        if len(cl.connections) == 0:
            return False
        return True

    @classmethod
    def broadcast(cl, message):
        for connection in cl.connections:
            try:
                connection.write_message(message)
            except tornado.websocket.WebSocketClosedError:
                pass
            except tornado.iostream.StreamClosedError:
                pass

    def check_origin(self, origin):
        return True

abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)

def getFile(filePath):
    file = open(filePath, 'r')
    content = file.read()
    file.close()
    return content

def templatize(content, replacements):
    tmpl = Template(content)
    return tmpl.safe_substitute(replacements)

mainJs = templatize(getFile('web/main.js'), {'port': serverPort,
    'fps': frameRate, 'width': frameWidth, 'height': frameHeight})

set_ptz_data(json.loads(getFile("data.json")), True)

import markdown
readmeHtml = markdown.markdown(getFile('../README.md'), extensions=['fenced_code', 'codehilite', 'markdown.extensions.tables'])
for item in readmeHtml.split("\n"):
    if "https://github.com/chradev/pi-h264-to-browser-streamer/assets/" in item:
        strToChange = '<video autoplay loop muted src=' + item[3:-4] + '></video>'
        readmeHtml = readmeHtml.replace(item, strToChange)
readmeHtml = '<!doctype html><html lang="en"><head><title>Readme.md</title><style>table, th, td {text-align: center; border:1px solid black; border-collapse: collapse;}</style></head><body>' + readmeHtml + '</body></html>'

class readmeHandler(tornado.web.RequestHandler):
    def get(self):
        self.write(readmeHtml)

class mainHandler(tornado.web.RequestHandler):
    def get(self):
        self.set_header('Content-Type', 'text/javascript')
        self.write(mainJs)

requestHandlers = [
    (r"/cam(\d+)/", camHandler),
    (r"/ptz/", ptzHandler),
    (r"/toggle/", toggleHandler),  # ADDED
    (r"/main.js", mainHandler),
    (r"/readme", readmeHandler),
    (r"/(.*)", tornado.web.StaticFileHandler, {
        "path": "web/",
        "default_filename": "index.html"
    })
]

print(('[%s] Starting: Dual camera streaming server & web interface on RPi 5'
       '\n\t\t\t\t-> with two 8MP RPi cameras v.2 at size: %r/%r px'
       '\n\t\t\t\t-> starting up at flip: %r/%r, offset: 0-%r/0-%r px'
       '\n\t\t\t\t-> capturing at framerate: %r fps, size: %r/%r px'
       '\n\t\t\t\t-> streaming h264 video frame by frame over WebSocket'
       '\n\t\t\t\t=> run browser at address: http://%s:%r/') %
      (time.strftime("%Y-%m-%d %X"), frameOffsetX0m, frameOffsetY0m, framehFlip0,
       framevFlip0, frameOffsetX0m - frameWidth, frameOffsetY0m - frameHeight,
       frameRate, frameWidth, frameHeight, hostIPAddr, serverPort))

"""
async def toggle_camera():
    global toggle_requested
    currentCamera = 0  # Default camera (0)
    while True:
        await tornado.gen.sleep(0.1)  # Check every 100ms
        if toggle_requested:
            toggle_requested = False  # Reset the toggle flag
            # Stop the current camera and start the other
            if currentCamera == 0:
                print("[%s] Switching to Camera 1" % time.strftime("%Y-%m-%d %X"))
                streamer0.stop()
                streamer1.start()
                currentCamera = 1
            else:
                print("[%s] Switching to Camera 0" % time.strftime("%Y-%m-%d %X"))
                streamer1.stop()
                streamer0.start()
                currentCamera = 0
            # Notify all connected clients about the current camera state
            for conn in camHandler.connsCam0 + camHandler.connsCam1:
                try:
                    conn.write_message(json.dumps({"currentCamera": currentCamera}))
                except Exception as e:
                    print(f"Error broadcasting toggle state: {e}")
"""

try:
    # Start the default camera (Camera 0)
    print("[%s] Starting default camera: Camera 0" % time.strftime("%Y-%m-%d %X"))
    streamer0.start()

    # Tornado application and event loop setup
    application = tornado.web.Application(requestHandlers)
    application.listen(serverPort)
    loop = tornado.ioloop.IOLoop.current()

    # Set the loop for the streamers
    streamer0.setLoop(loop)
    streamer1.setLoop(loop)

    # Add toggle task to Tornado loop
    #loop.add_callback(toggle_camera)

    # Start the event loop
    loop.start()
except KeyboardInterrupt:
    # Stop both streams cleanly on exit
    print("[%s] KeyboardInterrupt received. Stopping cameras." % time.strftime("%Y-%m-%d %X"))
    if streamer0.isRunning():
        streamer0.stop()
    if streamer1.isRunning():
        streamer1.stop()
    loop.stop()
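
For debugging, I'm also thinking of temporarily swapping camera 1's StreamingOutput for a dumb frame counter, to check whether the second encoder produces any frames at all after the toggle, independently of the websocket path. This is just my own test helper (the name CountingOutput is made up); the outputframe signature is copied from the StreamingOutput class above:

Code:

from picamera2.outputs import Output

class CountingOutput(Output):
    """Counts encoded frames and prints a line roughly once per second at 30 fps."""
    def __init__(self):
        super().__init__()
        self.frames = 0

    def outputframe(self, frame, keyframe=True, timestamp=None, packet=None, audio=None):
        self.frames += 1
        if self.frames % 30 == 0:
            print(f"[debug] encoder produced {self.frames} frames, last frame {len(frame)} bytes")

# usage idea: picam21.start_recording(H264Encoder(repeat=True, framerate=frameRate, qp=33), CountingOutput())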


HTML Code:

Code:

<!doctype html>
<html lang="en">
  <head>
    <meta charset="utf-8">
    <title>Double RPi camera streamer</title>
    <link rel="icon" href="/favicon.ico?v=2" type="image/x-icon"/>
    <link rel="stylesheet" type="text/css" href="styles.css">
    <script src="jmuxer.min.js"></script>
  </head>
  <body>
    <DIV class="common_table">
      <DIV class="common_table_row">
        <DIV class="common_table_vid">
          <!-- Initial camera stream, only one displayed at a time -->
          <video autoplay muted id="stream0" style="display: block;"></video>
          <video autoplay muted id="stream1" style="display: none;"></video>
        </DIV>
      </DIV>
      <DIV class="common_table_row">
        <!-- Toggle button for switching cameras -->
        <div style="text-align: center; margin: 20px;">
          <button id="toggle-camera">Toggle Camera</button>
        </div>
      </DIV>
      <DIV class="common_table_row">
        <DIV class="common_table_col">
          <div class="sliders" id="streamStage0">
            <div style="text-align: center;">
              &nbsp;Change X/Y offset values of camera 0:
            </div>
            <div>
              <div>
                <label for="xoffset0">&nbsp;X</label>
                <input style="width: 100px;" type="range" id="xoffset0" name="xoffset0" min="0" max="1000" value="500" step="10" />
                Value: <output id="xvalue0"></output>
              </div>
              <div>
                <label for="yoffset0">&nbsp;Y</label>
                <input style="width: 100px;" type="range" id="yoffset0" name="yoffset0" min="0" max="1000" value="500" step="10" />
                Value: <output id="yvalue0"></output>
              </div>
              <div>
                &nbsp;
                <input type="checkbox" id="text0" name="text0" value="text0" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="text0">Enable texts</label>
                &nbsp;&nbsp;&nbsp;&nbsp;
                <input type="checkbox" id="line0" name="line0" value="line0" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="line0">Enable lines</label>
                &nbsp;&nbsp;&nbsp;
                <input type="checkbox" id="defs0" name="defs0" value="defs0" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="defs0">Save as defaults</label>
              </div>
              <div>
                &nbsp;
                <input type="checkbox" id="hflip0" name="hflip0" value="hflip0" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="hflip0">Horizontal flip</label>
                &nbsp;
                <input type="checkbox" id="vflip0" name="vflip0" value="vflip0" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="vflip0">Vertical flip</label>
                &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
                <input type="checkbox" id="pause0" name="pause0" value="pause0" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="pause0">Pause stream</label>
              </div>
            </div>
          </div>
        </DIV>
        <DIV class="common_table_col">
          <div class="sliders" id="streamStage1">
            <div style="text-align: center;">
              &nbsp;Change X/Y offset values of camera 1:
            </div>
            <div>
              <div>
                <label for="xoffset1">&nbsp;&nbsp;X</label>
                <input style="width: 100px;" type="range" id="xoffset1" name="xoffset1" min="0" max="1000" value="500" step="10" />
                Value: <output id="xvalue1"></output>
              </div>
              <div>
                <label for="yoffset1">&nbsp;&nbsp;Y</label>
                <input style="width: 100px;" type="range" id="yoffset1" name="yoffset1" min="0" max="1000" value="500" step="10" />
                Value: <output id="yvalue1"></output>
              </div>
              <div>
                &nbsp;
                <input type="checkbox" id="text1" name="text1" value="text1" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="text1">Enable texts</label>
                &nbsp;&nbsp;&nbsp;&nbsp;
                <input type="checkbox" id="line1" name="line1" value="line1" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="line1">Enable lines</label>
                &nbsp;&nbsp;&nbsp;
                <input type="checkbox" id="defs1" name="defs1" value="defs1" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="defs1">Save as defaults</label>
              </div>
              <div>
                &nbsp;
                <input type="checkbox" id="hflip1" name="hflip1" value="hflip1" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="hflip1">Horizontal flip</label>
                &nbsp;
                <input type="checkbox" id="vflip1" name="vflip1" value="vflip1" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="vflip1">Vertical flip</label>
                &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
                <input type="checkbox" id="pause1" name="pause1" value="pause1" style="transform: scale(2);" onclick='handleCbClick(this);' />
                <label for="pause1">Pause stream</label>
              </div>
            </div>
          </div>
        </DIV>
      </DIV>
      <DIV class="common_table_caption">
        <div class="sliders" id="streamStage">
          <div style="text-align: center;">
            &nbsp;Change X/Y offset and Zoom values of both cameras:
          </div>
          <div>
            <div>
              <label for="xoffset">&nbsp;X</label>
              <input style="width: 100px;" type="range" id="xoffset" name="xoffset" min="0" max="1000" value="500" step="10" />
              Value: <output id="xvalue"></output>
            </div>
            <div>
              <label for="yoffset">&nbsp;Y</label>
              <input style="width: 100px;" type="range" id="yoffset" name="yoffset" min="0" max="1000" value="500" step="10" />
              Value: <output id="yvalue"></output>
            </div>
            <div>
              <label for="zoffset">&nbsp;Z</label>
              <input style="width: 100px;" type="range" id="zoffset" name="zoffset" min="0.5" max="2.0" value="1.0" step="0.05" />
              Value: <output id="zvalue"></output>
            </div>
          </div>
        </div>
      </DIV>
    </DIV>
    <p class="sliders" style="text-align: left; padding: 16px;">
      Open to read <a href="./readme" target="_blank">README.md</a> file from <a href="https://github.com/chradev/pi-h264-to-browser-streamer" target="_blank">pi-h264-to-browser-streamer</a> project
    </p>
  </body>
  <script>
    let currentCamera = 0; // Default camera

    // Persistent WebSocket connection for toggling
    const wsToggle = new WebSocket("ws://" + document.location.hostname + ":8000/toggle/");

    wsToggle.onopen = function () {
      console.log("WebSocket connection established.");
    };

    wsToggle.onmessage = function (event) {
      console.log("Raw message received from backend:", event.data); // Log all raw messages
      try {
        const data = JSON.parse(event.data);
        console.log("Parsed message from backend:", data); // Log parsed messages
        if (data.currentCamera !== undefined) {
          console.log("Processing currentCamera update...");
          currentCamera = data.currentCamera; // Sync with backend state
          console.log(`Updated currentCamera to: ${currentCamera}`);
          updateCameraDisplay(); // Update visible streams
        } else if (data.test) {
          console.log("Test message received:", data.test); // Handle test message
        } else {
          console.warn("Unexpected message format:", data);
        }
      } catch (error) {
        console.error("Error processing WebSocket message:", error);
      }
    };

    wsToggle.onerror = function (error) {
      console.error("WebSocket error:", error);
    };

    wsToggle.onclose = function () {
      console.warn("WebSocket connection closed.");
    };

    // Update displayed stream
    function updateCameraDisplay() {
      const stream0 = document.getElementById("stream0");
      const stream1 = document.getElementById("stream1");

      if (currentCamera === 0) {
        stream0.style.display = "block";
        stream1.style.display = "none";
      } else {
        stream0.style.display = "none";
        stream1.style.display = "block";
      }

      console.log(`stream0 display: ${stream0.style.display}`);
      console.log(`stream1 display: ${stream1.style.display}`);
    }

    function toggleCamera() {
      if (wsToggle.readyState === WebSocket.OPEN) {
        wsToggle.send("toggle");
        console.log("Toggle request sent to backend.");
      } else {
        console.error("WebSocket connection is not open.");
      }
    }

    updateCameraDisplay(); // Initialize display
  </script>
  <script src="sliders.js"></script>
  <script src="main.js"></script>
</html>

Javascript code:

Code:

window.onload = function () {
    // Note: this declares a new variable local to window.onload, separate from
    // the currentCamera used by the inline script in index.html.
    let currentCamera = 0;

    // jmuxer instance for camera 0: feeds raw H.264 into <video id="stream0">
    const jmuxer0 = new JMuxer({
      node: "stream0",
      mode: "video",
      flushingTime: 0,
      fps: $fps,
      width: $width,
      height: $height,
      debug: false,
    });

    // jmuxer instance for camera 1: feeds raw H.264 into <video id="stream1">
    const jmuxer1 = new JMuxer({
      node: "stream1",
      mode: "video",
      flushingTime: 0,
      fps: $fps,
      width: $width,
      height: $height,
      debug: false,
    });

    // One WebSocket per camera; $port/$fps/$width/$height are substituted by
    // the Python server (string.Template) before main.js is served.
    const ws0 = new WebSocket("ws://" + document.location.hostname + ":" + $port + "/cam0/");
    const ws1 = new WebSocket("ws://" + document.location.hostname + ":" + $port + "/cam1/");

    ws0.binaryType = "arraybuffer";
    ws1.binaryType = "arraybuffer";

    // Feed incoming H.264 data to the matching muxer, but only while that
    // camera is the active one and the page is visible.
    ws0.onmessage = function (event) {
      if (!document.hidden && currentCamera === 0) {
        jmuxer0.feed({ video: new Uint8Array(event.data) });
      }
    };

    ws1.onmessage = function (event) {
      if (!document.hidden && currentCamera === 1) {
        jmuxer1.feed({ video: new Uint8Array(event.data) });
      }
    };

    init_sliders($port, $width);
};
I'm quite lost on this, so I'd appreciate any advice. Thank you!

Statistics: Posted by CrabbyPatty — Wed Dec 04, 2024 9:14 am


