Unable to Stream Video with GStreamer HTTP Server

You may get better latency without any HTTP server running, just using:

gst-launch-1.0 videotestsrc pattern=ball is-live=1 ! video/x-raw,width=320,height=240,framerate=30/1 ! timeoverlay font-desc="Sans, 24" ! tee name=t ! queue ! autovideosink  t. ! queue ! videoscale ! video/x-raw,width=640,height=480,framerate=30/1 ! theoraenc ! queue ! oggmux ! tcpserversink host=127.0.0.1 port=8080

and test from localhost with:

firefox http://127.0.0.1:8080
cvlc tcp://127.0.0.1:8080

Though, you may want to explain your final goal for better advice (resolution, format, framerate, more processing than just displaying with VLC on localhost?).
For lower latency, you could try RTSP or WebRTC.
You may try an RTSP server such as in this Python example:

# RTSP test server exposing the same videotestsrc feed over four RTP payload
# formats (MPEG-TS/H.264, plain H.264, JPEG, raw video), each on its own
# mount point, while also displaying the source locally via autovideosink.
#
# Note that this example is using Linux for streaming to localhost.
# Things would be different if not using the loopback interface.

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import GLib, Gst, GstVideo, GstRtspServer

# Get the MTU for your localhost loopback device with (assuming it is named lo):
#   ifconfig lo | grep mtu
# and adjust for your case by subtracting 68 bytes of protocol overhead,
# e.g. with an ifconfig-reported MTU of 65536: mtu = 65536 - 68 = 65468.
# Computed rather than hard-coded so the value always matches the formula.
MTU = 65536 - 68
print('Using MTU=%d' % MTU)

Gst.init(None)

mainloop = GLib.MainLoop()
server = GstRtspServer.RTSPServer()
mounts = server.get_mount_points()

# Common front of every pipeline: live 320x240 test source with a timestamp
# overlay, teed so one branch is shown locally and the other is upscaled to
# 640x480 and handed to a per-mount encoder/payloader tail.
_SOURCE = ('videotestsrc is-live=1 pattern=ball '
           '! video/x-raw,width=320,height=240,framerate=30/1 '
           '! timeoverlay font-desc="Sans, 24" '
           '! tee name=t ! queue ! autovideosink '
           't. ! queue ! videoscale '
           '! video/x-raw,width=640,height=480,pixel-aspect-ratio=1/1 '
           '! videoconvert ')

def _add_factory(mount_path, tail):
    """Mount an RTSPMediaFactory at mount_path running _SOURCE + tail.

    tail is a gst-launch fragment ending in the RTP payloader; it must
    contain one '%d' placeholder, which receives MTU, and must name the
    payloader 'pay0' as required by RTSPMediaFactory.
    """
    factory = GstRtspServer.RTSPMediaFactory()
    factory.set_launch(_SOURCE + (tail % MTU))
    mounts.add_factory(mount_path, factory)

# Mount path -> encoder/payloader tail appended to the shared source pipeline.
_MOUNTS = {
    "/test_rtp-mp2t": ('! x264enc insert-vui=1 key-int-max=15 tune=zerolatency '
                       '! h264parse ! mpegtsmux ! rtpmp2tpay mtu=%d name=pay0'),
    "/test_rtp-h264": ('! x264enc insert-vui=1 key-int-max=15 tune=zerolatency '
                       '! h264parse ! rtph264pay mtu=%d name=pay0'),
    "/test_rtp-jpg": '! jpegenc ! rtpjpegpay mtu=%d name=pay0',
    "/test_rtp-vraw": '! rtpvrawpay mtu=%d name=pay0',
}
for _path, _tail in _MOUNTS.items():
    _add_factory(_path, _tail)

server.attach(None)

print("stream ready at rtsp://127.0.0.1:8554/test_rtp-{mp2t , h264 , jpg , vraw}")
mainloop.run()

and you would test with:

# RTP/MP2T would give poor latency without optimizations
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test_rtp-mp2t latency=0 ! rtpmp2tdepay ! tsdemux ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

# RTP/H264 may be much better but may need two frames for P-frames decoding:
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test_rtp-h264 latency=0 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

# RTP/JPG may be even better as it only needs one frame for decoding:
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test_rtp-jpg latency=0 ! rtpjpegdepay ! jpegdec ! videoconvert ! autovideosink

# RTP/VRAW doesn't encode so not sure for your case but it might also require some kernel max socket buffer size adjustment for high resolutions
sudo sysctl -w net.core.rmem_max=25000000
sudo sysctl -w net.core.wmem_max=25000000
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test_rtp-vraw latency=0 ! rtpvrawdepay ! videoconvert ! autovideosink

Note that this just measures the encoding and RTSP to localhost and decoding latency.
Most of the final latency may come from your camera and its bus to the system (USB camera?), or from your own processing. Also be sure to know your screen's refresh rate for glass-to-glass latency evaluation.

Also note that, for decreasing latency, increasing the framerate is your easiest bet.