Showing 7 changed files with 277 additions and 0 deletions
code/mosaic-client/client.py
0 → 100644
+import json
+import os
+import requests
+import subprocess
+import time
+
+import cv2
+import torch
+
+from models.experimental import attempt_load
+from utils.datasets import LoadImages
+from utils.general import check_img_size, non_max_suppression, set_logging, scale_coords
+from utils.torch_utils import select_device, time_synchronized
+
+SERVER_CHECK_ENDPOINT = 'http://mosaic.khunet.net'
+WEIGHT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'weight.pt')
+INPUT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'input.mp4')
+OUTPUT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'output.ts')
+
+
+def download(url, file_path):
+    # Fetch the segment and write it to disk; fail loudly on HTTP errors
+    res = requests.get(url)
+    res.raise_for_status()
+    with open(file_path, 'wb') as file:
+        file.write(res.content)
+
+
+def mosaic(src, ratio=0.07):
+    # Pixelate a region: shrink it, then blow it back up with nearest-neighbour
+    # interpolation. Clamp to 1x1 so tiny detections don't crash cv2.resize.
+    h, w = src.shape[:2]
+    small = cv2.resize(src, (max(1, int(w * ratio)), max(1, int(h * ratio))))
+    return cv2.resize(small, (w, h), interpolation=cv2.INTER_NEAREST)
+
+
+@torch.no_grad()
+def detect(weight_path, input_path, output_path):
+    # Pipe processed frames to ffmpeg on stdin (input 0) and mux them with the
+    # audio track of the original segment (input 1).
+    # NOTE: the raw-frame size/rate below assume a 1280x720, 30 fps source;
+    # the frames written to stdin must match.
+    command = ['ffmpeg',
+               '-loglevel', 'panic',
+               '-y',
+               '-f', 'rawvideo',
+               '-pixel_format', 'bgr24',
+               '-video_size', "{}x{}".format(1280, 720),
+               '-framerate', str(30),
+               '-i', '-',
+               '-i', input_path,
+               '-c:a', 'copy',
+               '-map', '0:v:0',
+               '-map', '1:a:0',
+               '-c:v', 'libx264',
+               '-pix_fmt', 'yuv420p',
+               '-preset', 'ultrafast',
+               output_path]
+    writer = subprocess.Popen(command, stdin=subprocess.PIPE)
+
+    source, weights, imgsz = input_path, weight_path, 640
+
+    # Initialize
+    set_logging()
+    device = select_device('')
+
+    # Load model
+    model = attempt_load(weights, map_location=device)  # load FP32 model
+    stride = int(model.stride.max())  # model stride
+    imgsz = check_img_size(imgsz, s=stride)  # check img_size
+    names = model.module.names if hasattr(model, 'module') else model.names  # get class names
+
+    # Set Dataloader
+    dataset = LoadImages(source, img_size=imgsz, stride=stride)
+
+    # Run inference
+    if device.type != 'cpu':
+        model(torch.zeros(1, 3, imgsz, imgsz).to(device).type_as(next(model.parameters())))  # run once
+
+    t0 = time.time()
+    for path, img, im0s, vid_cap in dataset:
+        img = torch.from_numpy(img).to(device)
+        img = img.float()  # uint8 to fp16/32
+        img /= 255.0  # 0 - 255 to 0.0 - 1.0
+        if img.ndimension() == 3:
+            img = img.unsqueeze(0)
+
+        # Inference
+        t1 = time_synchronized()
+        pred = model(img, augment=False)[0]
+
+        # Apply NMS
+        pred = non_max_suppression(pred, max_det=1000)
+        t2 = time_synchronized()
+
+        # Process detections
+        for i, det in enumerate(pred):  # detections per image
+            p, s, im0, frame = path, '', im0s.copy(), getattr(dataset, 'frame', 0)
+
+            s += '%gx%g ' % img.shape[2:]  # print string
+
+            if len(det):
+                # Rescale boxes from img_size to im0 size
+                det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
+
+                # Print results
+                for c in det[:, -1].unique():
+                    n = (det[:, -1] == c).sum()  # detections per class
+                    s += f"{n} {names[int(c)]}{'s' * (n > 1)}, "  # add to string
+
+                # Pixelate each detected region
+                for *xyxy, conf, cls in reversed(det):
+                    x1, y1, x2, y2 = int(xyxy[0]), int(xyxy[1]), int(xyxy[2]), int(xyxy[3])
+                    im0[y1:y2, x1:x2] = mosaic(im0[y1:y2, x1:x2])
+
+            # Print time (inference + NMS)
+            # print(f'{s}Done. ({t2 - t1:.3f}s)')
+
+            # Stream the processed frame to ffmpeg as raw BGR bytes
+            writer.stdin.write(im0.tobytes())
+
+    writer.stdin.close()
+    writer.wait()
+    print(f'Done. ({time.time() - t0:.3f}s)')
+
+
+if __name__ == '__main__':
+    while True:
+        try:
+            # Poll the server for the next segment that needs mosaicking
+            response = requests.get(SERVER_CHECK_ENDPOINT + '/check')
+            data = json.loads(response.text)
+            if data['data'] is None:
+                continue
+            download(SERVER_CHECK_ENDPOINT + '/origin/' + data['data'], INPUT_PATH)
+            detect(WEIGHT_PATH, INPUT_PATH, OUTPUT_PATH)
+            # Upload the processed segment under its original name
+            with open(OUTPUT_PATH, 'rb') as f:
+                response = requests.post(SERVER_CHECK_ENDPOINT + '/upload', files={'file': (data['data'], f)})
+            print(data['data'] + ' : ' + response.text)
+        except Exception as e:
+            print(f'Error: {e}')
+        finally:
+            # Poll at most twice per second
+            time.sleep(0.5)
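
The pixelation trick used by `mosaic()` above can be tried on its own. The sketch below is not part of the diff; it only assumes OpenCV is installed, and `face.jpg` plus the box coordinates are hypothetical placeholders.

import cv2

# Minimal sketch of the mosaic step: shrink a region, then scale it back up
# with nearest-neighbour interpolation so the pixel blocks stay visible.
# 'face.jpg' and the box below are illustrative placeholders, not project files.
img = cv2.imread('face.jpg')
x1, y1, x2, y2 = 100, 80, 220, 200

roi = img[y1:y2, x1:x2]
h, w = roi.shape[:2]
small = cv2.resize(roi, (max(1, int(w * 0.07)), max(1, int(h * 0.07))))
img[y1:y2, x1:x2] = cv2.resize(small, (w, h), interpolation=cv2.INTER_NEAREST)

cv2.imwrite('face_mosaic.jpg', img)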
code/mosaic-server/package-lock.json
0 → 100644
code/mosaic-server/package.json
0 → 100644
code/mosaic-server/public/live/.gitkeep
0 → 100644
File mode changed
code/mosaic-server/public/origin/.gitkeep
0 → 100644
File mode changed
code/mosaic-server/server.js
0 → 100644
+const express = require('express');
+const fs = require('fs').promises;
+const multer = require('multer');
+const redis = require('redis');
+const { promisify } = require('util');
+const { readM3U8 } = require('./utils');
+
+// Directory where the RTMP ingest writes its HLS playlist and .ts segments
+const RTMP_INPUT_FOLDER = '/root/hls/test_720p2628kbs';
+
+const app = express();
+const client = redis.createClient();
+// Uploaded (mosaicked) segments are stored under public/live with their original names
+const upload = multer({
+  storage: multer.diskStorage({
+    destination: (req, file, cb) => cb(null, __dirname + '/public/live'),
+    filename: (req, file, cb) => cb(null, file.originalname),
+  }),
+});
+
+const sleep = (ms) => new Promise(r => setTimeout(r, ms));
+const delAsync = promisify(client.del).bind(client);
+const getAsync = promisify(client.get).bind(client);
+const setAsync = promisify(client.set).bind(client);
+const rpushAsync = promisify(client.rpush).bind(client);
+const lpopAsync = promisify(client.lpop).bind(client);
+const lrangeAsync = promisify(client.lrange).bind(client);
+const saddAsync = promisify(client.sadd).bind(client);
+const sismemberAsync = promisify(client.sismember).bind(client);
+const sremAsync = promisify(client.srem).bind(client);
+
+app.use(express.static(__dirname + '/public'));
+
+// EVENT-type playlist of segments that have already been mosaicked (served from public/live)
+app.get('/live.m3u8', async (req, res) => {
+  const EXT_X_TARGETDURATION = await getAsync('HLS_EXT_X_TARGETDURATION');
+  const PLAYLIST = await lrangeAsync('HLS_COMPLETE', 0, -1);
+  const data = `#EXTM3U
+#EXT-X-PLAYLIST-TYPE:EVENT
+#EXT-X-TARGETDURATION:${EXT_X_TARGETDURATION}
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:0
+${PLAYLIST.map(x => `#EXTINF:${x.split('/')[1]},\nlive/${x.split('/')[0]}\n`).join('')}`;
+  res.set('content-type', 'audio/mpegurl');
+  return res.send(data);
+});
+
+// Same playlist but pointing at the untouched source segments (kept for debugging)
+app.get('/origin.m3u8', async (req, res) => {
+  const EXT_X_TARGETDURATION = await getAsync('HLS_EXT_X_TARGETDURATION');
+  const PLAYLIST = await lrangeAsync('HLS_ORIGINAL', 0, -1);
+  const data = `#EXTM3U
+#EXT-X-PLAYLIST-TYPE:EVENT
+#EXT-X-TARGETDURATION:${EXT_X_TARGETDURATION}
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:0
+${PLAYLIST.map(x => `#EXTINF:${x.split('/')[1]},\norigin/${x.split('/')[0]}\n`).join('')}`;
+  res.set('content-type', 'audio/mpegurl');
+  return res.send(data);
+});
+
+// Hand the oldest waiting segment to a worker and mark it as in progress
+app.get('/check', async (req, res) => {
+  const data = await lpopAsync('HLS_WAITING');
+  if (data === null) {
+    return res.json({ data: null });
+  }
+  await rpushAsync('HLS_PROGRESS', data);
+  return res.json({ data: data.split('/')[0] });
+});
+
+// Receive the mosaicked segment back and record that it has been uploaded
+app.post('/upload', upload.single('file'), async (req, res) => {
+  await saddAsync('HLS_UPLOADED', req.file.originalname);
+  return res.json({ success: true });
+});
+
+app.use((req, res, next) => {
+  return res.status(404).json({ error: 'Not Found' });
+});
+
+app.use((err, req, res, next) => {
+  return res.status(500).json({ error: 'Internal Server Error' });
+});
+
+app.listen(3000);
+
+const main = async () => {
+  for (;;) {
+    try {
+      const m3u8 = await readM3U8(`${RTMP_INPUT_FOLDER}/index.m3u8`);
+      const lastSeq = await getAsync('HLS_EXT_X_MEDIA_SEQUENCE');
+      if (m3u8.EXT_X_MEDIA_SEQUENCE !== lastSeq) {
+        // A media sequence of 0 means a new broadcast has started: reset all state
+        if (m3u8.EXT_X_MEDIA_SEQUENCE === '0') {
+          await delAsync('HLS_WAITING');
+          await delAsync('HLS_PROGRESS');
+          await delAsync('HLS_UPLOADED');
+          await delAsync('HLS_COMPLETE');
+          await delAsync('HLS_ORIGINAL'); // TODO: debug
+          await fs.rmdir(__dirname + '/public/origin/', { recursive: true });
+          await fs.rmdir(__dirname + '/public/live/', { recursive: true });
+          await fs.mkdir(__dirname + '/public/origin/');
+          await fs.mkdir(__dirname + '/public/live/');
+          await setAsync('HLS_EXT_X_TARGETDURATION', m3u8.EXT_X_TARGETDURATION);
+        }
+        // Copy the newest segment out of the RTMP folder and queue it for mosaicking
+        const item = m3u8.PLAYLIST.pop();
+        await fs.copyFile(`${RTMP_INPUT_FOLDER}/${item.file}`, __dirname + `/public/origin/${item.file}`);
+        await rpushAsync('HLS_WAITING', `${item.file}/${item.time}`);
+        await rpushAsync('HLS_ORIGINAL', `${item.file}/${item.time}`); // TODO: debug
+        await setAsync('HLS_EXT_X_MEDIA_SEQUENCE', m3u8.EXT_X_MEDIA_SEQUENCE);
+      }
+      // Promote the head of the in-progress queue once its upload has arrived,
+      // so segments enter the live playlist in broadcast order
+      const first_of_progress = await lrangeAsync('HLS_PROGRESS', 0, 0);
+      if (first_of_progress.length !== 0 && await sismemberAsync('HLS_UPLOADED', first_of_progress[0].split('/')[0]) === 1) {
+        await rpushAsync('HLS_COMPLETE', first_of_progress[0]);
+        await lpopAsync('HLS_PROGRESS');
+        await sremAsync('HLS_UPLOADED', first_of_progress[0].split('/')[0]);
+      }
+    } catch (e) {
+      // Ignore transient errors (e.g. index.m3u8 not written yet) and retry on the next tick
+    }
+    await sleep(1000);
+  }
+};
+
+main().catch(err => console.error(err));
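
To make the queueing in server.js easier to follow, here is a rough sketch (not part of the diff) of how one segment moves through the Redis keys. It is written against the Python redis-py client with a hypothetical segment name; the server itself uses node_redis as shown above.

import redis

r = redis.Redis(decode_responses=True)  # assumes a local Redis, as in server.js

# main loop: a new segment from the RTMP folder is queued for mosaicking.
r.rpush('HLS_WAITING', 'seg-42.ts/8.333')  # hypothetical "<file>/<duration>" entry

# GET /check: a worker claims the oldest waiting segment.
item = r.lpop('HLS_WAITING')
r.rpush('HLS_PROGRESS', item)

# POST /upload: the worker sends back the mosaicked file.
r.sadd('HLS_UPLOADED', item.split('/')[0])

# main loop: once the head of HLS_PROGRESS has been uploaded, it is promoted
# to HLS_COMPLETE and starts appearing in /live.m3u8.
head = r.lrange('HLS_PROGRESS', 0, 0)[0]
if r.sismember('HLS_UPLOADED', head.split('/')[0]):
    r.rpush('HLS_COMPLETE', head)
    r.lpop('HLS_PROGRESS')
    r.srem('HLS_UPLOADED', head.split('/')[0])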
code/mosaic-server/utils.js
0 → 100644
+const fs = require('fs').promises;
+
+// Minimal m3u8 reader: pulls out the header tags and the (duration, file) pairs
+module.exports.readM3U8 = async (filePath) => {
+  const data = await fs.readFile(filePath);
+  const EXT_X_VERSION = String(data).split('#EXT-X-VERSION:')[1].split('\n')[0];
+  const EXT_X_MEDIA_SEQUENCE = String(data).split('#EXT-X-MEDIA-SEQUENCE:')[1].split('\n')[0];
+  const EXT_X_TARGETDURATION = String(data).split('#EXT-X-TARGETDURATION:')[1].split('\n')[0];
+  const EXTINF = String(data).split('#EXTINF:');
+  EXTINF.shift(); // drop everything before the first #EXTINF tag
+  // Each chunk looks like "<duration>,\n<segment file>\n..."
+  const PLAYLIST = EXTINF.map(x => ({ file: x.split('\n')[1], time: x.split(',')[0] }));
+  return {
+    EXT_X_VERSION,
+    EXT_X_MEDIA_SEQUENCE,
+    EXT_X_TARGETDURATION,
+    PLAYLIST,
+  };
+};
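
readM3U8 parses the index.m3u8 written by the RTMP ingest. The sketch below is not part of the diff; the segment names and durations are made up purely to illustrate the input format it expects and the shape of the result.

# Example input readM3U8 expects and (roughly) what it returns; the segment
# names and durations below are hypothetical.
sample = """#EXTM3U
#EXT-X-VERSION:3
#EXT-X-MEDIA-SEQUENCE:0
#EXT-X-TARGETDURATION:9
#EXTINF:8.333,
test_720p2628kbs-0.ts
#EXTINF:8.333,
test_720p2628kbs-1.ts
"""

# Expected result:
# { EXT_X_VERSION: '3', EXT_X_MEDIA_SEQUENCE: '0', EXT_X_TARGETDURATION: '9',
#   PLAYLIST: [ { file: 'test_720p2628kbs-0.ts', time: '8.333' },
#               { file: 'test_720p2628kbs-1.ts', time: '8.333' } ] }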