common code
This commit is contained in:
4
SearchFrontend/search_ui/.prettierrc
Normal file
4
SearchFrontend/search_ui/.prettierrc
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"tabWidth": 2,
|
||||
"useTabs": false
|
||||
}
|
||||
1214
SearchFrontend/search_ui/package-lock.json
generated
1214
SearchFrontend/search_ui/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -10,12 +10,29 @@
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/react": "^11.14.0",
|
||||
"@emotion/styled": "^11.14.1",
|
||||
"@mui/material": "^7.3.2",
|
||||
"@mui/x-date-pickers": "^8.11.2",
|
||||
"@mui/x-date-pickers-pro": "^8.11.2",
|
||||
"@wojtekmaj/react-daterange-picker": "^7.0.0",
|
||||
"d3": "^7.9.0",
|
||||
"date-fns": "^4.1.0",
|
||||
"dayjs": "^1.11.18",
|
||||
"echarts": "^6.0.0",
|
||||
"echarts-for-react": "^3.0.2",
|
||||
"flex-layout-system": "^2.0.3",
|
||||
"react": "^19.1.1",
|
||||
"react-calendar": "^6.0.0",
|
||||
"react-date-range": "^2.0.1",
|
||||
"react-datepicker": "^8.7.0",
|
||||
"react-dom": "^19.1.1",
|
||||
"timelines-chart": "^2.14.2"
|
||||
"react-flexbox-grid": "^2.1.2",
|
||||
"react-split-pane": "^0.1.92",
|
||||
"rsuite": "^5.83.3",
|
||||
"timelines-chart": "^2.14.2",
|
||||
"uplot": "^1.6.32",
|
||||
"uplot-react": "^1.2.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.33.0",
|
||||
|
||||
@@ -1,6 +1,114 @@
|
||||
/* Root container */
|
||||
#root {
|
||||
max-width: 1280px;
|
||||
margin: 0 auto;
|
||||
padding: 2rem;
|
||||
width: 100vw;
|
||||
max-width: 100vw;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
text-align: center;
|
||||
background: #181a20;
|
||||
}
|
||||
|
||||
/* Video.js player */
|
||||
.video-js-mod {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
object-fit: contain;
|
||||
background: #000;
|
||||
}
|
||||
|
||||
.vjs-tech {
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
/* Main app layout */
|
||||
.app-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100vh;
|
||||
width: 100vw;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
gap: 12px;
|
||||
/* background: #181a20; */
|
||||
}
|
||||
|
||||
|
||||
.flex-group {
|
||||
display: flex;
|
||||
flex-direction: column; /* or 'row' if you want horizontal grouping */
|
||||
flex: 1 1 0;
|
||||
min-width: 0;
|
||||
}
|
||||
/* Section containers */
|
||||
.section-box-horiz {
|
||||
overflow: visible;
|
||||
flex-direction: row;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
/* Section containers */
|
||||
.section-box {
|
||||
flex: 0 0 5%;
|
||||
overflow: visible;
|
||||
/* background: #23272f; */
|
||||
/* padding: 0;
|
||||
box-sizing: border-box;
|
||||
border-radius: 10px;
|
||||
margin: 0 16px;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.10); */
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.timeline-container {
|
||||
flex: 0 0 24%;
|
||||
overflow: visible;
|
||||
background: #20232a;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
border-radius: 10px;
|
||||
margin: 0 16px;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.10);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.section-box:last-of-type {
|
||||
flex: 1 1 68%;
|
||||
overflow: hidden;
|
||||
background: #23272f;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
border-radius: 10px;
|
||||
margin: 0 16px 16px 16px;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.10);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
/* Responsive tweaks */
|
||||
@media (max-width: 600px) {
|
||||
.app-container {
|
||||
gap: 6px;
|
||||
}
|
||||
.section-box,
|
||||
.timeline-container,
|
||||
.section-box:last-of-type {
|
||||
margin: 0 4px;
|
||||
border-radius: 6px;
|
||||
padding: 0;
|
||||
}
|
||||
.date-range-selector {
|
||||
max-width: 98vw;
|
||||
padding: 12px 8px;
|
||||
border-radius: 8px;
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,289 @@
|
||||
import React from 'react';
|
||||
import EmbedTimeline from './components/EmbedTimeline';
|
||||
import './App.css';
|
||||
import data_results from "./util/embed_results_web.json"
|
||||
"use client";
|
||||
import React, { useState, useEffect, useRef, useCallback } from "react";
|
||||
import EmbedTimeline from "./components/EmbedTimeline";
|
||||
import VideoPlayer from "./components/VideoPlayer";
|
||||
// import ModernDateRangeSelector from './components/ModernDateRangeSelector';
|
||||
import CompactDateRangePicker from "./components/CompactDateRangePicker";
|
||||
import CustomDateRangePicker from "./components/DateRangePicker";
|
||||
import "./App.css";
|
||||
import StatusesDisplayHUD from "./components/StatusDisplay";
|
||||
|
||||
function App() {
|
||||
const original_data = useRef(null);
|
||||
const chartRef = useRef(null);
|
||||
const [dataResults, setDataResults] = useState(null);
|
||||
const [statusMessages, setStatusMessages] = useState([]);
|
||||
const [markerTime, setMarkerTime] = useState(0);
|
||||
const playerRef = useRef(null);
|
||||
const playerInstanceRef = useRef(null);
|
||||
// State for the values
|
||||
window.chartRef = chartRef;
|
||||
window.playerRef = playerRef;
|
||||
window.playerInstanceRef = playerInstanceRef;
|
||||
// Slider states
|
||||
|
||||
const [sliderMin, setSliderMin] = useState(0.0);
|
||||
const [sliderMax, setSliderMax] = useState(1.0);
|
||||
// Date range states
|
||||
//
|
||||
|
||||
const [startRange, setStartRange] = useState(
|
||||
new Date(new Date().getTime() - 7 * 24 * 60 * 60 * 1000)
|
||||
);
|
||||
const [endRange, setEndRange] = useState(new Date());
|
||||
// const [endRange, setEndRange] = useState(new Date(new Date().getTime() - 6 * 24 * 60 * 60 * 1000));
|
||||
const [queryText, setQueryText] = useState("A clouded leopard and a human");
|
||||
const [sliderValue, setSliderValue] = useState(0);
|
||||
|
||||
// State to track last submitted values
|
||||
const [lastSubmitted, setLastSubmitted] = useState({
|
||||
startRange,
|
||||
endRange,
|
||||
sliderValue,
|
||||
queryText,
|
||||
});
|
||||
|
||||
// Check if any value has changed
|
||||
const hasChanged =
|
||||
startRange !== lastSubmitted.startRange ||
|
||||
endRange !== lastSubmitted.endRange ||
|
||||
sliderValue !== lastSubmitted.sliderValue ||
|
||||
queryText !== lastSubmitted.queryText;
|
||||
|
||||
// Function to resubmit fetch
|
||||
const handleResubmit = () => {
|
||||
// Start streaming status updates
|
||||
fetch("api/return_status")
|
||||
.then((response) => {
|
||||
const reader = response.body.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
let buffer = ""; // Accumulate partial text
|
||||
|
||||
function read() {
|
||||
reader.read().then(({ done, value }) => {
|
||||
if (done) {
|
||||
if (buffer) {
|
||||
// console.log("Status:", buffer); // Log any remaining text
|
||||
}
|
||||
setStatusMessages([]);
|
||||
// console.log("Status stream finished");
|
||||
|
||||
return;
|
||||
}
|
||||
// Decode only the new chunk
|
||||
buffer += decoder.decode(value, { stream: true });
|
||||
|
||||
// If your server sends lines, split and log only complete lines:
|
||||
let lines = buffer.split("\n");
|
||||
buffer = lines.pop(); // Save incomplete line for next chunk
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.trim()) {
|
||||
// console.log("Status:", line);
|
||||
console.log(line)
|
||||
setStatusMessages((msgs) => [...msgs, JSON.parse(line)]);
|
||||
}
|
||||
}
|
||||
|
||||
read();
|
||||
});
|
||||
}
|
||||
read();
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error while streaming status:", error);
|
||||
});
|
||||
|
||||
const params = new URLSearchParams();
|
||||
params.append("startRange", startRange.toISOString());
|
||||
params.append("endRange", endRange.toISOString());
|
||||
params.append("threshold", 0.0);
|
||||
params.append("query", queryText);
|
||||
setDataResults({ videos: [], breaks: [] });
|
||||
|
||||
fetch("api/videos.json?" + params.toString())
|
||||
.then((res) => res.json())
|
||||
.then((data) => {
|
||||
const max_value = Math.max(
|
||||
...data["videos"].map((vid) => vid["embed_scores"]["score"][1])
|
||||
);
|
||||
setSliderMax(max_value);
|
||||
original_data.current = data;
|
||||
window.original_data = original_data;
|
||||
setDataResults(data);
|
||||
});
|
||||
|
||||
setLastSubmitted({ startRange, endRange, sliderValue, queryText });
|
||||
};
|
||||
|
||||
function updateDataAndValue(newValue) {
|
||||
const floatValue = parseFloat(newValue);
|
||||
setSliderValue(floatValue);
|
||||
var newData = JSON.parse(JSON.stringify(original_data.current));
|
||||
newData["videos"] = newData["videos"].filter(
|
||||
(vid) => vid["embed_scores"]["score"][1] >= floatValue
|
||||
);
|
||||
setDataResults(newData);
|
||||
}
|
||||
|
||||
function setMarkerValueNonReactive(inputValue) {
|
||||
let chart = chartRef.current.getEchartsInstance();
|
||||
let options = chart.getOption();
|
||||
let mappers = options["mappers"];
|
||||
|
||||
let vv = {
|
||||
xAxis: mappers["real_to_virtual"](new Date(inputValue)),
|
||||
lineStyle: { type: "solid", color: "#FF0000", width: 2 },
|
||||
label: {
|
||||
show: false,
|
||||
formatter: "Break",
|
||||
position: "bottom",
|
||||
color: "#888",
|
||||
fontSize: 10,
|
||||
},
|
||||
};
|
||||
|
||||
let markLine = {
|
||||
symbol: ["none", "none"],
|
||||
data: [vv],
|
||||
lineStyle: { type: "dashed", color: "#FF0000", width: 2 },
|
||||
silent: true,
|
||||
animation: false,
|
||||
};
|
||||
|
||||
// if ("markLine" in options["series"][1]) {
|
||||
if (false) {
|
||||
let vv_new = {
|
||||
xAxis: mappers["real_to_virtual"](new Date(inputValue)),
|
||||
};
|
||||
let markLine_new = {
|
||||
data: [vv_new],
|
||||
};
|
||||
|
||||
chart.setOption(
|
||||
{
|
||||
series: [{}, { markLine: { data: [vv_new] } }],
|
||||
},
|
||||
false,
|
||||
["series.markLine"]
|
||||
);
|
||||
} else {
|
||||
chart.setOption(
|
||||
{
|
||||
series: [{}, { markLine: markLine }],
|
||||
},
|
||||
false,
|
||||
["series.markLine"]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Memoize the timeline click handler
|
||||
const handleTimelineClick = useCallback(
|
||||
(path, timeoffset) => {
|
||||
console.log("Timeline clicked:", path, timeoffset);
|
||||
|
||||
if (playerRef.current && playerInstanceRef.current) {
|
||||
console.log("Seeking video player to:", path, timeoffset);
|
||||
playerInstanceRef.current.src({
|
||||
src: "api/" + path,
|
||||
type: "video/mp4",
|
||||
});
|
||||
playerInstanceRef.current.on("loadedmetadata", () => {
|
||||
playerInstanceRef.current.currentTime(timeoffset);
|
||||
});
|
||||
}
|
||||
},
|
||||
[] // Empty dependency array since it only uses playerRef
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const params = new URLSearchParams(window.location.search); // id=123
|
||||
|
||||
if (params.get("test_mode") == "true") {
|
||||
setStartRange(new Date(new Date().getTime() - 2 * 24 * 60 * 60 * 1000));
|
||||
setEndRange(new Date(new Date().getTime() - 1 * 24 * 60 * 60 * 1000));
|
||||
}
|
||||
handleResubmit();
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div className="App">
|
||||
<h1>Embed Timeline Visualization</h1>
|
||||
<EmbedTimeline data_in={data_results}/>
|
||||
<div className="app-container">
|
||||
<div className="section-box-horiz">
|
||||
<div className="flex-group">
|
||||
<CustomDateRangePicker
|
||||
startDate={startRange}
|
||||
endDate={endRange}
|
||||
setStartRange={setStartRange}
|
||||
setEndRange={setEndRange}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-group">
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Enter query"
|
||||
value={queryText}
|
||||
onChange={(e) => setQueryText(e.target.value)}
|
||||
style={{
|
||||
marginLeft: "16px",
|
||||
marginRight: "16px",
|
||||
padding: "8px",
|
||||
borderRadius: "4px",
|
||||
border: "1px solid #343a40",
|
||||
color: "#fff", // Text white
|
||||
backgroundColor: "#23272f", // Optional: dark background for contrast
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-group">
|
||||
<label
|
||||
style={{ marginLeft: "8px", marginRight: "8px", color: "#fff" }}
|
||||
>
|
||||
Threshold:
|
||||
</label>
|
||||
</div>
|
||||
<div className="flex-group">
|
||||
<input
|
||||
type="range"
|
||||
min={sliderMin}
|
||||
max={sliderMax}
|
||||
step={0.001}
|
||||
value={sliderValue}
|
||||
onChange={(e) => updateDataAndValue(e.target.value)}
|
||||
style={{
|
||||
width: "120px",
|
||||
color: "#fff", // Text white
|
||||
backgroundColor: "#23272f", // Optional: dark background for contrast
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-group">
|
||||
<span style={{ marginLeft: "8px", color: "#fff" }}>
|
||||
{sliderValue.toFixed(2)}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex-group">
|
||||
<button onClick={handleResubmit}>Resubmit</button>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<StatusesDisplayHUD statusMessages={statusMessages} />
|
||||
</div>
|
||||
|
||||
<div className="timeline-container">
|
||||
<EmbedTimeline
|
||||
chartRef={chartRef}
|
||||
data_in={dataResults}
|
||||
onTimelineClick={handleTimelineClick}
|
||||
/>
|
||||
</div>
|
||||
<div className="section-box">
|
||||
<VideoPlayer
|
||||
videoRef={playerRef}
|
||||
playerInstanceRef={playerInstanceRef}
|
||||
setMarkerTimeFunc={setMarkerValueNonReactive}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
import React, { useState } from "react";
|
||||
import DatePicker from "react-datepicker";
|
||||
import "react-datepicker/dist/react-datepicker.css";
|
||||
|
||||
export default function CompactDateRangePicker({ startDate, endDate, setStartDate, setEndDate}) {
|
||||
// const [startDate, setStartDate] = useState(null);
|
||||
// const [endDate, setEndDate] = useState(null);
|
||||
console.log(startDate)
|
||||
console.log(endDate)
|
||||
console.log(setStartDate)
|
||||
console.log(setEndDate)
|
||||
return (
|
||||
<DatePicker
|
||||
selectsRange
|
||||
startDate={startDate}
|
||||
endDate={endDate}
|
||||
onChange={([start, end]) => {
|
||||
setStartDate(start);
|
||||
setEndDate(end);
|
||||
if (end && onChange) onChange({ startDate: start, endDate: end });
|
||||
}}
|
||||
isClearable
|
||||
maxDate={new Date()}
|
||||
placeholderText="Select date range"
|
||||
withPortal
|
||||
/>
|
||||
);
|
||||
}
|
||||
77
SearchFrontend/search_ui/src/components/DateRangePicker.jsx
Normal file
77
SearchFrontend/search_ui/src/components/DateRangePicker.jsx
Normal file
@@ -0,0 +1,77 @@
|
||||
import React, { useState, useRef, useEffect } from "react";
|
||||
|
||||
import "react-date-range/dist/styles.css"; // main css file
|
||||
import "react-date-range/dist/theme/default.css"; // theme css file
|
||||
import { DateRange } from "react-date-range";
|
||||
|
||||
export default function CustomDateRangePicker({ startDate, endDate, setStartRange, setEndRange }) {
|
||||
const minDate = new Date("2025-07-01")
|
||||
const maxDate = new Date()
|
||||
const [showCalendar, setShowCalendar] = useState(false);
|
||||
const calendarRef = useRef(null);
|
||||
|
||||
// Create range object for react-date-range
|
||||
const range = [{
|
||||
startDate: startDate,
|
||||
endDate: endDate,
|
||||
key: 'selection'
|
||||
}];
|
||||
|
||||
const handleSelect = (ranges) => {
|
||||
const { startDate: newStart, endDate: newEnd } = ranges.selection;
|
||||
|
||||
setStartRange(newStart);
|
||||
setEndRange(newEnd);
|
||||
|
||||
if (
|
||||
newStart &&
|
||||
newEnd &&
|
||||
newStart.getTime() !== newEnd.getTime()
|
||||
) {
|
||||
setShowCalendar(false);
|
||||
}
|
||||
};
|
||||
|
||||
// Hide calendar when clicking outside
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event) => {
|
||||
if (
|
||||
calendarRef.current &&
|
||||
!calendarRef.current.contains(event.target)
|
||||
) {
|
||||
setShowCalendar(false);
|
||||
}
|
||||
};
|
||||
|
||||
if (showCalendar) {
|
||||
document.addEventListener("mousedown", handleClickOutside);
|
||||
} else {
|
||||
document.removeEventListener("mousedown", handleClickOutside);
|
||||
}
|
||||
|
||||
return () => {
|
||||
document.removeEventListener("mousedown", handleClickOutside);
|
||||
};
|
||||
}, [showCalendar]);
|
||||
|
||||
return (
|
||||
<div ref={calendarRef} style={{ position: "relative" }}>
|
||||
<button onClick={() => setShowCalendar((prev) => !prev)}>
|
||||
{startDate?.toLocaleDateString()} -{" "}
|
||||
{endDate?.toLocaleDateString()}
|
||||
</button>
|
||||
|
||||
{showCalendar && (
|
||||
<div style={{ position: "absolute", zIndex: 10 }}>
|
||||
<DateRange
|
||||
minDate={minDate}
|
||||
maxDate={maxDate}
|
||||
ranges={range}
|
||||
onChange={handleSelect}
|
||||
moveRangeOnFirstSelection={false}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,28 +0,0 @@
|
||||
.time-block {
|
||||
opacity: 0.5;
|
||||
stroke-width: 1;
|
||||
fill: #4CAF50;
|
||||
stroke: #2E7D32;
|
||||
}
|
||||
|
||||
.score-line {
|
||||
fill: none;
|
||||
stroke-width: 2;
|
||||
stroke: #4CAF50;
|
||||
}
|
||||
|
||||
.score-dot {
|
||||
r: 4;
|
||||
stroke: white;
|
||||
stroke-width: 1;
|
||||
fill: #4CAF50;
|
||||
}
|
||||
|
||||
.axis {
|
||||
font-size: 12px;
|
||||
}
|
||||
.grid-line {
|
||||
stroke: #e0e0e0;
|
||||
stroke-dasharray: 2,2;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
@@ -1,361 +1,481 @@
|
||||
import React, { useRef, useEffect, useState } from 'react';
|
||||
import ReactECharts from 'echarts-for-react';
|
||||
// import './EmbedTimeline.css';
|
||||
import React, { useRef, useEffect } from "react";
|
||||
import ReactECharts from "echarts-for-react";
|
||||
|
||||
const EmbedTimeline = React.memo(function EmbedTimeline({
|
||||
chartRef,
|
||||
data_in,
|
||||
onTimelineClick,
|
||||
markerTime,
|
||||
}) {
|
||||
// --- Early return if loading ---
|
||||
if (!data_in) return <div>Loading....</div>;
|
||||
|
||||
export default function EmbedTimeline({ data_in }) {
|
||||
// --- Constants ---
|
||||
const BREAK_GAP = 0;
|
||||
console.log("REDRAW");
|
||||
|
||||
const timeFormatOptions = {
|
||||
withSeconds: {
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
hour: "numeric",
|
||||
minute: "2-digit",
|
||||
second: "numeric",
|
||||
hour12: true,
|
||||
},
|
||||
edges: {
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
hour: "numeric",
|
||||
minute: "2-digit",
|
||||
hour12: true,
|
||||
},
|
||||
within: {
|
||||
hour: "numeric",
|
||||
minute: "2-digit",
|
||||
hour12: true,
|
||||
},
|
||||
};
|
||||
|
||||
var result = []
|
||||
const mean = (data) => {
|
||||
if (data.length < 1) {
|
||||
return;
|
||||
}
|
||||
return data.reduce((prev, current) => prev + current) / data.length;
|
||||
};
|
||||
|
||||
function prepareVideoData(videos) {
|
||||
let new_data = [];
|
||||
videos.forEach((item) => {
|
||||
let start_time = new Date(1000 * item["start_time"]);
|
||||
if ("embed_scores" in item) {
|
||||
var mean_val = item["embed_scores"]["time"] / 2;
|
||||
// var max_score = Math.max(...item['embed_scores']['score'])
|
||||
var max_score = item["embed_scores"]["score"][1];
|
||||
var max_score_time = new Date(
|
||||
start_time.getTime() + 1000 * item["embed_scores"]["score"][3]
|
||||
);
|
||||
var new_time = new Date(start_time.getTime() + 1000 * 2 * mean_val);
|
||||
new_data.push([
|
||||
new Date(start_time.getTime()),
|
||||
new_time,
|
||||
max_score,
|
||||
max_score_time,
|
||||
]);
|
||||
// new_data.push([new_time, item['embed_scores']['score'][idx]]);
|
||||
|
||||
for (let idx_outer = 0; idx_outer < data_in.length; idx_outer++) {
|
||||
let item = data_in[idx_outer]
|
||||
let start_time = Date.parse(item["start_time"])
|
||||
var new_data = [];
|
||||
if ('embed_scores' in item) {
|
||||
for (let idx = 0; idx < item['embed_scores']['time'].length; idx++) {
|
||||
var new_time = 1000 * item['embed_scores']['time'][idx] + start_time
|
||||
// Math.max.apply(Math, item['embed_scores']['time'].map(function(o) { return o.y; }))
|
||||
// item['embed_scores']['time'].forEach((sec, idx) => {
|
||||
// let new_time = new Date(start_time.getTime() + 1000 * sec);
|
||||
|
||||
new_data.push([new_time, item['embed_scores']['score'][idx]])
|
||||
}
|
||||
}
|
||||
result.push(new_data)
|
||||
// new_data.push([new_time, item['embed_scores']['score'][idx]]);
|
||||
// });
|
||||
}
|
||||
});
|
||||
|
||||
// Remove duplicates and sort
|
||||
return Array.from(new Set(new_data.map(JSON.stringify)), JSON.parse).sort(
|
||||
(a, b) => new Date(a[0]) - new Date(b[0])
|
||||
);
|
||||
}
|
||||
|
||||
function calculateBreaks(videos) {
|
||||
const breaks = [];
|
||||
if (videos.length < 3) {
|
||||
return breaks;
|
||||
}
|
||||
let t_diff = videos.at(-1)["end_time"] - videos[0]["start_time"];
|
||||
|
||||
for (let i = 0; i < videos.length - 1; i++) {
|
||||
let end_now = videos[i]["end_time"];
|
||||
let start_next = videos[i + 1]["start_time"];
|
||||
if (start_next - end_now > 60 * 60) {
|
||||
// still in unix timestamp. break only if spaces of 60 minutes
|
||||
breaks.push([end_now, start_next]);
|
||||
}
|
||||
}
|
||||
|
||||
function reframe_data(item, idx) {
|
||||
return breaks;
|
||||
}
|
||||
|
||||
function fillNulls(data) {
|
||||
const with_nulls = [];
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
with_nulls.push(data[i]);
|
||||
if (i < data.length - 1) {
|
||||
const curr_time = new Date(data[i][0]).getTime();
|
||||
const next_time = new Date(data[i + 1][0]).getTime();
|
||||
if (next_time - curr_time > 1000) {
|
||||
// with_nulls.push([new Date(curr_time + 1), null]);
|
||||
// with_nulls.push([new Date(curr_time + 1), 0]);
|
||||
}
|
||||
}
|
||||
}
|
||||
return with_nulls;
|
||||
}
|
||||
|
||||
function prepareBreaks(breaksRaw) {
|
||||
return breaksRaw.map(([start, end]) => ({
|
||||
start: new Date(1000 * start),
|
||||
end: new Date(1000 * end),
|
||||
gap: BREAK_GAP,
|
||||
isExpanded: false,
|
||||
}));
|
||||
}
|
||||
function buildVirtualTimeMapper(breaks) {
|
||||
const sortedBreaks = breaks.slice().sort((a, b) => a.start - b.start);
|
||||
return function (realDate) {
|
||||
let offset = 0;
|
||||
let realMs = realDate.getTime();
|
||||
for (const br of sortedBreaks) {
|
||||
if (realMs >= br.end.getTime()) {
|
||||
offset += br.end.getTime() - br.start.getTime();
|
||||
} else if (realMs > br.start.getTime()) {
|
||||
offset += realMs - br.start.getTime();
|
||||
break;
|
||||
}
|
||||
}
|
||||
return realMs - offset;
|
||||
};
|
||||
}
|
||||
|
||||
function mapVirtualToRealTime(virtualMs, breaks, virtualTime) {
|
||||
let realMs = virtualMs;
|
||||
for (const br of breaks) {
|
||||
const breakStartVirtual = virtualTime(br.start);
|
||||
const breakDuration = br.end.getTime() - br.start.getTime();
|
||||
if (virtualMs >= breakStartVirtual) {
|
||||
realMs += breakDuration;
|
||||
}
|
||||
}
|
||||
return realMs;
|
||||
}
|
||||
|
||||
function buildSeries(item, idx) {
|
||||
const data = item.map(function (item, index) {
|
||||
return {
|
||||
value: item,
|
||||
};
|
||||
});
|
||||
|
||||
console.log(data)
|
||||
|
||||
return {
|
||||
type: "custom",
|
||||
renderItem: function (params, api) {
|
||||
var yValue = api.value(2);
|
||||
var start = api.coord([api.value(0), yValue]);
|
||||
var size = api.size([api.value(1) - api.value(0), yValue]);
|
||||
var style = api.style();
|
||||
var maxTime = api.coord([api.value(3), yValue]);
|
||||
return {
|
||||
type: 'line',
|
||||
symbol: 'none',
|
||||
smooth: true,
|
||||
lineStyle: {
|
||||
normal: {
|
||||
color: 'green',
|
||||
width: 1,
|
||||
}
|
||||
type: "group",
|
||||
children: [
|
||||
{
|
||||
type: "rect",
|
||||
shape: {
|
||||
x: start[0],
|
||||
y: start[1],
|
||||
width: size[0],
|
||||
height: size[1],
|
||||
},
|
||||
style: { fill: "#00F0003F" },
|
||||
},
|
||||
data: item
|
||||
}
|
||||
}
|
||||
const series_out = result.map(reframe_data)
|
||||
{
|
||||
type: "circle",
|
||||
shape: { cx: maxTime[0], cy: maxTime[1], r: 1 },
|
||||
style: { fill: "#00F0003F" },
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
symbol: "none",
|
||||
smooth: true,
|
||||
large: true,
|
||||
lineStyle: { normal: { color: "green", width: 1 } },
|
||||
// data: item.map(d => [d[0], d[1], d[2], d[3]]),
|
||||
data: data,
|
||||
// sampling: 'lttb',
|
||||
triggerLineEvent: true,
|
||||
z: 11,
|
||||
};
|
||||
}
|
||||
|
||||
const option = {
|
||||
xAxis: {
|
||||
type: 'time',
|
||||
boundaryGap: false
|
||||
},
|
||||
yAxis: {
|
||||
type: 'value'
|
||||
},
|
||||
dataZoom: [
|
||||
function buildInvisibleHitBoxSeries(item, idx) {
|
||||
const data = item.map(function (item, index) {
|
||||
return {
|
||||
value: item,
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
type: "custom",
|
||||
renderItem: function (params, api) {
|
||||
var yValue = api.value(2);
|
||||
var start = api.coord([api.value(0), yValue]);
|
||||
var size = api.size([api.value(1) - api.value(0), yValue]);
|
||||
var style = api.style();
|
||||
|
||||
var maxTime = api.coord([api.value(3), yValue]);
|
||||
return {
|
||||
type: "group",
|
||||
children: [
|
||||
{
|
||||
type: 'inside',
|
||||
start: 0,
|
||||
end: 100
|
||||
type: "rect",
|
||||
shape: {
|
||||
x: start[0],
|
||||
y: start[1],
|
||||
width: size[0],
|
||||
height: size[1],
|
||||
},
|
||||
style: { fill: "#00F0003F" },
|
||||
},
|
||||
{
|
||||
start: 0,
|
||||
end: 100
|
||||
}
|
||||
],
|
||||
series: series_out
|
||||
type: "circle",
|
||||
shape: { cx: maxTime[0], cy: maxTime[1], r: 1 },
|
||||
style: { fill: "#00F0003F" },
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
symbol: "none",
|
||||
smooth: true,
|
||||
// large: true,
|
||||
lineStyle: { normal: { color: "green", width: 1 } },
|
||||
// data: item.map(d => [d[0], d[1], d[2], d[3]]),
|
||||
data: data,
|
||||
// sampling: 'lttb',
|
||||
triggerLineEvent: true,
|
||||
z: 11,
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<ReactECharts option={option} style={{ height: 400 }} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
// return {
|
||||
// type: 'line',
|
||||
// symbol: 'none',
|
||||
// smooth: true,
|
||||
// large: true,
|
||||
// lineStyle: { width: 100, opacity: 0 },
|
||||
// data: item.map(d => [d[0], d[1]]),
|
||||
// sampling: 'lttb',
|
||||
// triggerLineEvent: true,
|
||||
// z: 10
|
||||
// };
|
||||
}
|
||||
|
||||
function buildBlankSeries() {
|
||||
return {
|
||||
type: "line",
|
||||
symbol: "none",
|
||||
lineStyle: { width: 100, opacity: 0 },
|
||||
data: [],
|
||||
z: 4,
|
||||
};
|
||||
}
|
||||
|
||||
// --- Data Processing ---
|
||||
const videoData = prepareVideoData(data_in["videos"]);
|
||||
const withNulls = videoData;
|
||||
data_in["calc_breaks"] = calculateBreaks(data_in["videos"]);
|
||||
|
||||
// const withNulls = fillNulls(videoData);
|
||||
const breaks = prepareBreaks(data_in["calc_breaks"]);
|
||||
const virtualTime = buildVirtualTimeMapper(breaks);
|
||||
|
||||
// const EmbedTimeline = ({ data }) => {
|
||||
// const containerRef = useRef(null);
|
||||
const breaks_split = data_in["calc_breaks"].flat(1).map(function (x) {
|
||||
return x * 1000;
|
||||
});
|
||||
// if (videoData.length > 2) {
|
||||
// breaks_split.unshift(new Date(videoData[0][0]).getTime())
|
||||
// breaks_split.push(new Date(videoData.at(-1)[0]).getTime())
|
||||
// }
|
||||
const paired_splits = [];
|
||||
for (let i = 0; i < breaks_split.length; i += 2) {
|
||||
paired_splits.push([
|
||||
breaks_split[i],
|
||||
breaks_split[i + 1],
|
||||
breaks_split[i] / 2 + breaks_split[i + 1] / 2,
|
||||
]);
|
||||
}
|
||||
const split_centers = paired_splits.map((d) => new Date(d[2]));
|
||||
const splitCenterVirtualTimes = split_centers.map((d) => virtualTime(d));
|
||||
const splitCenterLabels = split_centers.map((d) =>
|
||||
new Date(d).toLocaleTimeString("en-US", timeFormatOptions.edges)
|
||||
);
|
||||
|
||||
// useEffect(() => {
|
||||
// if (!containerRef.current) return;
|
||||
const splitCenterMarkLines = splitCenterVirtualTimes.map((vt, i) => ({
|
||||
xAxis: vt,
|
||||
// make the line invisible
|
||||
lineStyle: { width: 0, color: "transparent" },
|
||||
// show the precomputed text
|
||||
label: {
|
||||
show: true,
|
||||
formatter: splitCenterLabels[i],
|
||||
position: "end", // try other values if overlap; 'end', 'insideStartTop', etc.
|
||||
color: "#FFFFFF",
|
||||
fontSize: 11,
|
||||
rotate: 90,
|
||||
},
|
||||
}));
|
||||
|
||||
// var myChart = ReactECharts.init(containerRef);
|
||||
const virtualData = withNulls.map(
|
||||
([realStartTime, realEndTime, value, realMaxTime]) => [
|
||||
virtualTime(new Date(realStartTime)),
|
||||
virtualTime(new Date(realEndTime)),
|
||||
value,
|
||||
virtualTime(new Date(realMaxTime)),
|
||||
]
|
||||
);
|
||||
const result = [virtualData];
|
||||
const ymax = Math.max(...virtualData.map((d) => d[2]));
|
||||
// --- Series ---
|
||||
const seriesNormal = result.map(buildSeries);
|
||||
// const seriesInvisible = result.map(buildInvisibleHitBoxSeries);
|
||||
const series_out = [].concat(seriesNormal, buildBlankSeries());
|
||||
|
||||
// // Specify the configuration items and data for the chart
|
||||
// var option = {
|
||||
// title: {
|
||||
// text: 'ECharts Getting Started Example'
|
||||
// },
|
||||
// tooltip: {},
|
||||
// legend: {
|
||||
// data: ['sales']
|
||||
// },
|
||||
// xAxis: {
|
||||
// data: ['Shirts', 'Cardigans', 'Chiffons', 'Pants', 'Heels', 'Socks']
|
||||
// },
|
||||
// yAxis: {},
|
||||
// series: [
|
||||
// {
|
||||
// name: 'sales',
|
||||
// type: 'bar',
|
||||
// data: [5, 20, 36, 10, 10, 20]
|
||||
// }
|
||||
// ]
|
||||
// };
|
||||
// --- Break MarkLines ---
|
||||
const breakMarkLines = breaks.map((br) => ({
|
||||
xAxis: virtualTime(br.start),
|
||||
lineStyle: { type: "dashed", color: "#888", width: 2 },
|
||||
label: {
|
||||
show: true,
|
||||
formatter: "Break",
|
||||
position: "bottom",
|
||||
color: "#888",
|
||||
fontSize: 10,
|
||||
},
|
||||
}));
|
||||
|
||||
// // Display the chart using the configuration items and data just specified.
|
||||
// myChart.setOption(option);
|
||||
// });
|
||||
// return (
|
||||
// <div ref={containerRef}>
|
||||
// </div>
|
||||
// )
|
||||
// }
|
||||
// export default EmbedTimeline;
|
||||
// Attach break mark lines to the first series
|
||||
if (seriesNormal[0]) {
|
||||
seriesNormal[0].markLine = {
|
||||
symbol: ["none", "none"],
|
||||
data: [...(breakMarkLines || []), ...(splitCenterMarkLines || [])],
|
||||
lineStyle: { type: "dashed", color: "#888", width: 2 },
|
||||
label: { show: true, position: "bottom", color: "#888", fontSize: 10 },
|
||||
};
|
||||
}
|
||||
|
||||
// --- Axis & Chart Option ---
|
||||
const virtual_x_min = virtualData.length > 0 ? virtualData[0][0] : 0;
|
||||
const virtual_x_max =
|
||||
virtualData.length > 0 ? virtualData[virtualData.length - 1][0] : 1;
|
||||
|
||||
const option = {
|
||||
animation: false,
|
||||
// progressive: 0, // Disable progressive rendering
|
||||
progressiveThreshold: 100000 , // Disable progressive threshold
|
||||
mappers: {
|
||||
virtual_to_real: mapVirtualToRealTime,
|
||||
real_to_virtual: virtualTime,
|
||||
},
|
||||
response: true,
|
||||
grid: {
|
||||
top: 30, // Remove top padding
|
||||
left: 10,
|
||||
right: 20,
|
||||
bottom: 60,
|
||||
containLabel: true,
|
||||
},
|
||||
dataZoom: [
|
||||
{
|
||||
type: "slider",
|
||||
show: true,
|
||||
xAxisIndex: [0],
|
||||
startValue: virtual_x_min,
|
||||
endValue: virtual_x_max,
|
||||
filterMode: 'weakFilter',
|
||||
},
|
||||
{
|
||||
type: "inside",
|
||||
xAxisIndex: [0],
|
||||
startValue: virtual_x_min,
|
||||
endValue: virtual_x_max,
|
||||
filterMode: 'weakFilter',
|
||||
},
|
||||
],
|
||||
xAxis: {
|
||||
type: "value",
|
||||
min: virtual_x_min,
|
||||
max: virtual_x_max,
|
||||
splitLine: { show: false },
|
||||
axisLabel: {
|
||||
formatter: function (virtualMs) {
|
||||
let range = virtual_x_max - virtual_x_min;
|
||||
if (
|
||||
chartRef &&
|
||||
chartRef.current &&
|
||||
chartRef.current.getEchartsInstance
|
||||
) {
|
||||
const chart = chartRef.current.getEchartsInstance();
|
||||
const dz = chart.getOption().dataZoom?.[0];
|
||||
if (
|
||||
dz &&
|
||||
dz.startValue !== undefined &&
|
||||
dz.endValue !== undefined
|
||||
) {
|
||||
range = dz.endValue - dz.startValue;
|
||||
}
|
||||
}
|
||||
const realTime = mapVirtualToRealTime(virtualMs, breaks, virtualTime);
|
||||
if (realTime) {
|
||||
const useSeconds = range < 5 * 60 * 1000;
|
||||
const fmt = useSeconds
|
||||
? timeFormatOptions.withSeconds
|
||||
: timeFormatOptions.edges;
|
||||
return new Date(realTime).toLocaleTimeString("en-US", fmt);
|
||||
}
|
||||
return "";
|
||||
},
|
||||
},
|
||||
},
|
||||
yAxis: {
|
||||
type: "value",
|
||||
min: 0.0,
|
||||
max: ymax,
|
||||
splitLine: { show: false },
|
||||
},
|
||||
series: series_out.map((s) => ({
|
||||
...s,
|
||||
animation: false, // Disable animation for each series
|
||||
animationDuration: 0,
|
||||
})),
|
||||
};
|
||||
|
||||
// const EmbedTimelineF = ({ data }) => {
|
||||
// const svgRef = useRef(null);
|
||||
// const containerRef = useRef(null);
|
||||
// const [showLabels, setShowLabels] = useState(true);
|
||||
// const [zoomLevel, setZoomLevel] = useState(1);
|
||||
// --- Chart Event Handlers ---
|
||||
// Handle a click on the chart: convert the click's pixel position into a
// virtual x-coordinate, map it to real time, and ask the backend which clip
// covers that moment. (Dead commented-out D3 scaffolding removed from the body.)
async function onChartClick(params, echarts) {
  const nativeEvent = params.event.event;
  const pixel = [nativeEvent.offsetX, nativeEvent.offsetY];
  // Pixel -> data coordinates ([virtualX, y]) using the first series' grid.
  const dataCoord = echarts.convertFromPixel({ seriesIndex: 0 }, pixel);

  const res = await fetch("/api/events/click", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      // mapVirtualToRealTime presumably returns milliseconds; /1000 converts
      // to seconds for the API -- TODO confirm against the backend contract.
      timestamp:
        mapVirtualToRealTime(dataCoord[0], breaks, virtualTime) / 1000,
    }),
  });
  if (!res.ok) throw new Error(`HTTP error: ${res.status}`);
  const { path, timeoffset } = await res.json();
  if (onTimelineClick)
    onTimelineClick(path, virtualTime(new Date(timeoffset)));
}
|
||||
|
||||
// // Clear any existing SVG content
|
||||
// d3.select(svgRef.current).selectAll("*").remove();
|
||||
|
||||
// // Parse dates and prepare data
|
||||
// const parseTime = d3.timeParse("%Y-%m-%dT%H:%M:%S.%f");
|
||||
// const parseTimeAlt = d3.timeParse("%Y-%m-%dT%H:%M:%S");
|
||||
|
||||
// const parseDate = (dateStr) => {
|
||||
// return parseTime(dateStr) || parseTimeAlt(dateStr);
|
||||
// };
|
||||
|
||||
// const processedData = data.map((d, i) => ({
|
||||
// id: i,
|
||||
// startTime: parseDate(d.start_time),
|
||||
// endTime: parseDate(d.end_time),
|
||||
// scores: d.embed_scores.time.map((time, j) => ({
|
||||
// time: parseDate(time),
|
||||
// score: d.embed_scores.score[j]
|
||||
// }))
|
||||
// }));
|
||||
|
||||
// // Set up dimensions
|
||||
// const margin = { top: 50, right: 50, bottom: 50, left: 50 };
|
||||
// const plotHeight = 300;
|
||||
// const blockHeight = 30;
|
||||
// const baseWidth = 1000;
|
||||
// const width = baseWidth * zoomLevel - margin.left - margin.right;
|
||||
|
||||
// // Find overall time range
|
||||
// const allTimes = processedData.flatMap(d => [d.startTime, d.endTime, ...d.scores.map(s => s.time)]);
|
||||
// const timeExtent = d3.extent(allTimes);
|
||||
|
||||
// // Add some padding to time range
|
||||
// const timePadding = (timeExtent[1] - timeExtent[0]) * 0.02;
|
||||
// timeExtent[0] = new Date(timeExtent[0].getTime() - timePadding);
|
||||
// timeExtent[1] = new Date(timeExtent[1].getTime() + timePadding);
|
||||
|
||||
// // Find score range
|
||||
// const allScores = processedData.flatMap(d => d.scores.map(s => s.score));
|
||||
// const scoreExtent = d3.extent(allScores);
|
||||
// const scorePadding = (scoreExtent[1] - scoreExtent[0]) * 0.1;
|
||||
// scoreExtent[0] -= scorePadding;
|
||||
// scoreExtent[1] += scorePadding;
|
||||
|
||||
// // Create main SVG
|
||||
// const svg = d3.select(svgRef.current)
|
||||
// .attr("width", width + margin.left + margin.right)
|
||||
// .attr("height", plotHeight + margin.top + margin.bottom);
|
||||
|
||||
// const mainGroup = svg.append("g")
|
||||
// .attr("transform", `translate(${margin.left}, ${margin.top})`);
|
||||
|
||||
// // Create scales
|
||||
// const xScale = d3.scaleTime()
|
||||
// .domain(timeExtent)
|
||||
// .range([0, width]);
|
||||
|
||||
// const yScoreScale = d3.scaleLinear()
|
||||
// .domain(scoreExtent)
|
||||
// .range([plotHeight * 0.6, 0]);
|
||||
|
||||
// // Zoom functionality
|
||||
// const zoom = d3.zoom()
|
||||
// .scaleExtent([1, 20])
|
||||
// .on("zoom", (event) => {
|
||||
// // Update zoom transform
|
||||
// const newTransform = event.transform;
|
||||
|
||||
// // Update x-scale with zoom
|
||||
// const newXScale = newTransform.rescaleX(xScale);
|
||||
|
||||
// // Function to update visualization elements
|
||||
// const updateVisualization = () => {
|
||||
// // Update score lines
|
||||
// mainGroup.selectAll(".score-line")
|
||||
// .attr("d", d3.line()
|
||||
// .x(s => newXScale(s.time))
|
||||
// .y(s => yScoreScale(s.score))
|
||||
// );
|
||||
|
||||
// // Update score dots
|
||||
// mainGroup.selectAll(".score-dot")
|
||||
// .attr("cx", s => newXScale(s.time));
|
||||
|
||||
// // Update time blocks
|
||||
// mainGroup.selectAll(".time-block")
|
||||
// .attr("x", d => newXScale(d.startTime))
|
||||
// .attr("width", d => Math.max(2, newXScale(d.endTime) - newXScale(d.startTime)));
|
||||
|
||||
// // Update time labels if visible
|
||||
// if (showLabels) {
|
||||
// mainGroup.selectAll(".block-start-label")
|
||||
// .attr("x", d => newXScale(d.startTime));
|
||||
// mainGroup.selectAll(".block-end-label")
|
||||
// .attr("x", d => newXScale(d.endTime));
|
||||
// }
|
||||
|
||||
// // Update x-axis
|
||||
// mainGroup.select(".x-axis").call(
|
||||
// d3.axisBottom(newXScale)
|
||||
// .ticks(8)
|
||||
// .tickFormat(d3.timeFormat("%H:%M:%S"))
|
||||
// );
|
||||
// };
|
||||
|
||||
// // Apply updates
|
||||
// updateVisualization();
|
||||
// });
|
||||
|
||||
// // Add zoom behavior
|
||||
// svg.call(zoom);
|
||||
|
||||
// // Create line generator
|
||||
// const line = d3.line()
|
||||
// .x(d => xScale(d.time))
|
||||
// .y(d => yScoreScale(d.score))
|
||||
// .curve(d3.curveMonotoneX);
|
||||
|
||||
// // Add grid lines
|
||||
// const yTicks = yScoreScale.ticks(6);
|
||||
// mainGroup.selectAll(".grid-line-y")
|
||||
// .data(yTicks)
|
||||
// .enter()
|
||||
// .append("line")
|
||||
// .attr("class", "grid-line")
|
||||
// .attr("x1", 0)
|
||||
// .attr("x2", width)
|
||||
// .attr("y1", d => yScoreScale(d))
|
||||
// .attr("y2", d => yScoreScale(d));
|
||||
|
||||
// // Add score lines and dots
|
||||
// processedData.forEach((d, i) => {
|
||||
// // Score line
|
||||
// mainGroup.append("path")
|
||||
// .datum(d.scores)
|
||||
// .attr("class", `score-line`)
|
||||
// .attr("d", line);
|
||||
|
||||
// // Score dots
|
||||
// mainGroup.selectAll(`.score-dot-group-${i}`)
|
||||
// .data(d.scores)
|
||||
// .enter()
|
||||
// .append("circle")
|
||||
// .attr("class", `score-dot`)
|
||||
// .attr("cx", s => xScale(s.time))
|
||||
// .attr("cy", s => yScoreScale(s.score));
|
||||
|
||||
// // Time blocks with full data for zoom tracking
|
||||
// mainGroup.append("rect")
|
||||
// .datum(d)
|
||||
// .attr("class", `time-block`)
|
||||
// .attr("x", xScale(d.startTime))
|
||||
// .attr("y", plotHeight * 0.7)
|
||||
// .attr("width", Math.max(2, xScale(d.endTime) - xScale(d.startTime)))
|
||||
// .attr("height", blockHeight);
|
||||
|
||||
// // Conditional labels
|
||||
// if (showLabels) {
|
||||
// mainGroup.append("text")
|
||||
// .datum(d)
|
||||
// .attr("class", "block-start-label")
|
||||
// .attr("x", xScale(d.startTime))
|
||||
// .attr("y", plotHeight * 0.7 + blockHeight + 15)
|
||||
// .attr("text-anchor", "start")
|
||||
// .style("font-size", "10px")
|
||||
// .text(d.startTime.toLocaleTimeString());
|
||||
|
||||
// mainGroup.append("text")
|
||||
// .datum(d)
|
||||
// .attr("class", "block-end-label")
|
||||
// .attr("x", xScale(d.endTime))
|
||||
// .attr("y", plotHeight * 0.7 + blockHeight + 15)
|
||||
// .attr("text-anchor", "end")
|
||||
// .style("font-size", "10px")
|
||||
// .text(d.endTime.toLocaleTimeString());
|
||||
// }
|
||||
// });
|
||||
|
||||
// // Y-axis for scores
|
||||
// const yAxis = d3.axisLeft(yScoreScale)
|
||||
// .ticks(6)
|
||||
// .tickFormat(d3.format(".4f"));
|
||||
|
||||
// mainGroup.append("g")
|
||||
// .attr("class", "axis y-axis")
|
||||
// .call(yAxis);
|
||||
|
||||
// // Y-axis label
|
||||
// mainGroup.append("text")
|
||||
// .attr("transform", "rotate(-90)")
|
||||
// .attr("y", -40)
|
||||
// .attr("x", -plotHeight / 2)
|
||||
// .style("text-anchor", "middle")
|
||||
// .text("Embed Score");
|
||||
|
||||
// // X-axis
|
||||
// const xAxis = d3.axisBottom(xScale)
|
||||
// .ticks(8)
|
||||
// .tickFormat(d3.timeFormat("%H:%M:%S"));
|
||||
|
||||
// mainGroup.append("g")
|
||||
// .attr("class", "axis x-axis")
|
||||
// .attr("transform", `translate(0, ${plotHeight})`)
|
||||
// .call(xAxis);
|
||||
|
||||
// }, [data, showLabels, zoomLevel]);
|
||||
|
||||
// // Zoom control handler
|
||||
// const handleZoom = (factor) => {
|
||||
// const currentZoom = zoomLevel;
|
||||
// const newZoom = Math.max(1, Math.min(20, currentZoom * factor));
|
||||
// setZoomLevel(newZoom);
|
||||
// };
|
||||
|
||||
// // Reset zoom
|
||||
// const handleResetZoom = () => {
|
||||
// setZoomLevel(1);
|
||||
// };
|
||||
|
||||
// return (
|
||||
// <div className="timeline-container">
|
||||
// <div className="timeline-controls">
|
||||
// <button onClick={() => handleZoom(1.5)}>Zoom In</button>
|
||||
// <button onClick={() => handleZoom(1 / 1.5)}>Zoom Out</button>
|
||||
// <button onClick={handleResetZoom}>Reset Zoom</button>
|
||||
// <button onClick={() => setShowLabels(!showLabels)}>
|
||||
// {showLabels ? 'Hide Labels' : 'Show Labels'}
|
||||
// </button>
|
||||
// </div>
|
||||
// <div ref={containerRef} className="svg-container">
|
||||
// <svg ref={svgRef}></svg>
|
||||
// </div>
|
||||
// </div>
|
||||
// );
|
||||
// };
|
||||
// ECharts ready callback -- intentionally a no-op placeholder (the echarts
// instance argument is currently unused).
function onChartReady(echarts) {
  // Chart is ready
}
|
||||
|
||||
const onEvents = { click: onChartClick };
|
||||
window.chartRef2 = chartRef;
|
||||
// --- Render ---
|
||||
return (
|
||||
<ReactECharts
|
||||
ref={chartRef}
|
||||
onChartReady={onChartReady}
|
||||
onEvents={onEvents}
|
||||
option={option}
|
||||
style={{ width: "100%", height: "100%" }}
|
||||
/>
|
||||
);
|
||||
});
|
||||
export default EmbedTimeline;
|
||||
|
||||
79
SearchFrontend/search_ui/src/components/StatusDisplay.jsx
Normal file
79
SearchFrontend/search_ui/src/components/StatusDisplay.jsx
Normal file
@@ -0,0 +1,79 @@
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
export default function StatusesDisplayHUD({ statusMessages }) {
|
||||
|
||||
|
||||
const msg = {};
|
||||
|
||||
statusMessages.forEach((m) => {
|
||||
let when_key = 'other'
|
||||
if (m['task'] == 'SCHEDULED')
|
||||
m['when'].forEach(( w ) => { msg[w] = 'Scheduled' })
|
||||
else {
|
||||
if ('when' in m)
|
||||
when_key = m['when']
|
||||
msg[when_key] = m['task']
|
||||
}
|
||||
|
||||
|
||||
|
||||
});
|
||||
|
||||
return (
|
||||
<div>
|
||||
{Object.entries(msg).map(([when, messages], idx) => (
|
||||
<StatusDisplay key={when} when={when} message={messages} />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
export function StatusDisplay({when, message }) {
|
||||
let msg_show = ''
|
||||
|
||||
msg_show = when + ': ' + message
|
||||
|
||||
|
||||
|
||||
return (
|
||||
<div
|
||||
className="status-message"
|
||||
style={{
|
||||
color: "#fff",
|
||||
background: "#23272f",
|
||||
padding: "8px",
|
||||
margin: "4px 0",
|
||||
borderRadius: "4px",
|
||||
minHeight: "20px",
|
||||
}}
|
||||
>
|
||||
{msg_show}
|
||||
</div>
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// <div
|
||||
// className="status-messages"
|
||||
// style={{
|
||||
// color: "#fff",
|
||||
// background: "#23272f",
|
||||
// padding: "8px",
|
||||
// margin: "8px 0",
|
||||
// borderRadius: "4px",
|
||||
// minHeight: "40px",
|
||||
// }}
|
||||
// >
|
||||
// {statusMessages.map((msg, idx) => (
|
||||
// <div key={idx}>{msg}</div>
|
||||
// ))}
|
||||
// </div>
|
||||
72
SearchFrontend/search_ui/src/components/VideoPlayer.jsx
Normal file
72
SearchFrontend/search_ui/src/components/VideoPlayer.jsx
Normal file
@@ -0,0 +1,72 @@
|
||||
import React, { useRef, useEffect, forwardRef, useImperativeHandle } from "react";
|
||||
import videojs from "video.js";
|
||||
import "video.js/dist/video-js.css";
|
||||
|
||||
const VideoPlayer = function VideoPlayer({videoRef, playerInstanceRef, setMarkerTimeFunc}) {
|
||||
|
||||
|
||||
|
||||
useEffect(() => {
|
||||
// Prevent double init in StrictMode
|
||||
if (!playerInstanceRef.current && videoRef.current) {
|
||||
playerInstanceRef.current = videojs(videoRef.current, {
|
||||
controls: true,
|
||||
preload: "auto",
|
||||
autoplay: true,
|
||||
});
|
||||
|
||||
playerInstanceRef.current.on('timeupdate', async function (event) {
|
||||
|
||||
const res = await fetch('api/events/video_step', {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ timestamp: this.currentTime() }),
|
||||
});
|
||||
if (!res.ok) throw new Error(`HTTP error: ${res.status}`);
|
||||
const { path, timeoffset, do_update, absolute_time} = await res.json();
|
||||
setMarkerTimeFunc(1000*absolute_time)
|
||||
if (do_update) {
|
||||
playerInstanceRef.current.src({ src: 'api/' + path, type: "video/mp4" });
|
||||
|
||||
// Seek after metadata is loaded
|
||||
playerInstanceRef.current.on("loadedmetadata", () => {
|
||||
playerInstanceRef.current.currentTime(timeoffset);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (playerInstanceRef.current) {
|
||||
playerInstanceRef.current.dispose();
|
||||
playerInstanceRef.current = null;
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
|
||||
return (
|
||||
<div style={{ width: "100%", height: "100%" }}>
|
||||
<div data-vjs-player style={{ width: "100%", height: "100%" }}>
|
||||
<video
|
||||
ref={videoRef}
|
||||
className="video-js vjs-big-play-centered"
|
||||
playsInline
|
||||
style={{
|
||||
width: "100%",
|
||||
height: "100%",
|
||||
backgroundColor: "black",
|
||||
objectFit: "contain"
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default VideoPlayer;
|
||||
@@ -3,10 +3,11 @@
|
||||
line-height: 1.5;
|
||||
font-weight: 400;
|
||||
|
||||
color-scheme: light dark;
|
||||
color-scheme: light dark ;
|
||||
color: rgba(255, 255, 255, 0.87);
|
||||
background-color: #242424;
|
||||
|
||||
|
||||
font-synthesis: none;
|
||||
text-rendering: optimizeLegibility;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
|
||||
@@ -4,7 +4,7 @@ import './index.css'
|
||||
import App from './App.jsx'
|
||||
|
||||
createRoot(document.getElementById('root')).render(
|
||||
<StrictMode>
|
||||
// <StrictMode>
|
||||
<App />
|
||||
</StrictMode>,
|
||||
// </StrictMode>,
|
||||
)
|
||||
|
||||
@@ -4,5 +4,13 @@ import react from '@vitejs/plugin-react-swc'
|
||||
// https://vite.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
server: {'host':'0.0.0.0'}
|
||||
server: {'host':'0.0.0.0',
|
||||
'proxy':{
|
||||
'/api': {
|
||||
target: 'http://192.168.1.242:5003',
|
||||
changeOrigin: true,
|
||||
rewrite: (path) => path.replace(/^\/api/, ''),
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
10
SearchInterface.code-workspace
Normal file
10
SearchInterface.code-workspace
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"folders": [
|
||||
{
|
||||
"path": "."
|
||||
},
|
||||
{
|
||||
"path": "../../../Seafile/Designs/Code/Python/CommonCode"
|
||||
}
|
||||
]
|
||||
}
|
||||
13
SearchScratch/test_recreate_cache.py
Normal file
13
SearchScratch/test_recreate_cache.py
Normal file
@@ -0,0 +1,13 @@
|
||||
# Scratch script: rebuild the embedding-score cache for one day's upload
# folder, then spot-check that a single video exposes its embeddings.
import sys, os
sys.path.append("/home/thebears/Web/Nuggets/SearchInterface/SearchUtil")
sys.path.append("/home/thebears/Web/Nuggets/SearchInterface/VectorService/util")
import embed_scores as ES

# Folder of FTP camera uploads for 2025-09-13.
cd = '/srv/ftp_tcc/leopards1/2025/09/13/'
# Computes (or loads from cache) per-frame embedding scores for the query
# at threshold 0.1.
o = ES.calculate_embedding_score_in_folder(cd, 0.1, query='Two cats');

# %%
from CommonCode.video_meta import FTPVideo
f='/srv/ftp_tcc/leopards1/2025/09/13/Leopards1_00_20250913135952.mp4'
c = FTPVideo(f)
# Accessing .embeddings verifies the per-video embedding archive loads.
c.embeddings
||||
182
SearchScratch/test_seek.py
Normal file
182
SearchScratch/test_seek.py
Normal file
@@ -0,0 +1,182 @@
|
||||
# %%
|
||||
import sys, os
|
||||
sys.path.append("/home/thebears/Web/Nuggets/SearchInterface/SearchUtil")
|
||||
sys.path.append("/home/thebears/Web/Nuggets/SearchInterface/VectorService/util")
|
||||
import embed_scores as ES
|
||||
# %%
|
||||
query = 'Cat and human'
|
||||
c_dir = '/srv/ftp_tcc/leopards1/2025/09/08'
|
||||
threshold=0.10
|
||||
|
||||
results = ES.calculate_embedding_score_in_folder(c_dir, threshold, query)
|
||||
print(len(results['videos']))
|
||||
|
||||
# %%
|
||||
c_dir = '/srv/ftp_tcc/leopards1/2025/09/08'
|
||||
query_vector = None
|
||||
og_dir = c_dir
|
||||
|
||||
if query_vector is None:
|
||||
query_vector = ES.get_query_vector(query)
|
||||
|
||||
candidate_dirs = list()
|
||||
candidate_dirs.append(og_dir)
|
||||
candidate_dirs.append(og_dir.replace('/srv/ftp_tcc','/mnt/hdd_24tb_1/videos/ftp'))
|
||||
candidate_dirs.append(og_dir.replace('/srv/ftp','/mnt/hdd_24tb_1/videos/ftp'))
|
||||
|
||||
c_dir = None
|
||||
for candidate in candidate_dirs:
|
||||
if os.path.exists(candidate):
|
||||
c_dir = candidate
|
||||
break
|
||||
if c_dir is None:
|
||||
# return []
|
||||
pass
|
||||
from embed_scores import *
|
||||
redis_key = 'helllo'
|
||||
vec_cache_str = md5(query_vector).hexdigest()
|
||||
cache_file_loc = os.path.join(c_dir, 'embedding_scores@'+str(threshold)+'@'+vec_cache_str+'.pkl')
|
||||
|
||||
|
||||
|
||||
vec_rep = get_vector_representation(c_dir, redis_key = redis_key)
|
||||
query_scores = get_scores_embedding_c_dir(c_dir, tuple(query_vector.tolist()[0]))
|
||||
|
||||
video_json_info = list()
|
||||
idces_keep = np.where(query_scores > threshold)[0]
|
||||
|
||||
video_id = vec_rep['idces'][idces_keep]
|
||||
videos_that_match = np.unique(video_id)
|
||||
|
||||
id_extract_video_level = np.where(np.isin(vec_rep['idces'], videos_that_match))[0]
|
||||
|
||||
idces_split = np.where(np.diff(vec_rep['idces'][id_extract_video_level]) !=0)[0] + 1
|
||||
subset_timestampsF = np.split(vec_rep['timestamps'][id_extract_video_level], idces_split)
|
||||
|
||||
|
||||
|
||||
for idx, subset_t in enumerate(subset_timestampsF):
|
||||
if len(subset_t) == 0:
|
||||
continue
|
||||
|
||||
min_t = min(subset_t)
|
||||
max_t = max(subset_t)
|
||||
print(idx, max_t - min_t)
|
||||
idces_curr = np.where(np.logical_and(vec_rep['timestamps'] > min_t , vec_rep['timestamps'] < max_t))[0]
|
||||
if len(idces_curr) == 0:
|
||||
continue
|
||||
|
||||
unq_vids = np.unique(vec_rep['idces'][idces_curr])
|
||||
subset_idx = np.where(np.isin(vec_rep['idces'],unq_vids))[0]
|
||||
|
||||
subset_idces = vec_rep['idces'][subset_idx]
|
||||
subset_timestamps = vec_rep['timestamps'][subset_idx]
|
||||
subset_scores = query_scores[subset_idx]
|
||||
idx_split = np.where(np.diff(vec_rep['idces'][subset_idx]) !=0)[0]+1
|
||||
|
||||
split_idces = np.split(subset_idces, idx_split)
|
||||
split_timestamps = np.split(subset_timestamps, idx_split)
|
||||
split_scores = np.split(subset_scores, idx_split)
|
||||
split_files = [vec_rep['source_files'][x[0]] for x in split_idces]
|
||||
|
||||
for s_file, s_scores, s_tstamps, s_idces in zip(split_files, split_scores, split_timestamps, split_idces):
|
||||
start_time = float(min(s_tstamps))
|
||||
end_time = float(max(s_tstamps))
|
||||
|
||||
frame_time = (s_tstamps - start_time).tolist()
|
||||
embed_scores = s_scores.tolist()
|
||||
|
||||
c_data = {'file_name': str(s_file), 'start_time':start_time, 'end_time':end_time, 'embed_scores':{'time':frame_time, 'score':embed_scores}}
|
||||
video_json_info.append(c_data)
|
||||
|
||||
|
||||
print(len(video_json_info))
|
||||
|
||||
# %%
|
||||
query = 'A cat and a human'
|
||||
c_dirs = ['/mnt/hdd_24tb_1/videos/ftp/leopards2/2025/08/26','/srv/ftp_tcc/leopards1/2025/08/27','/srv/ftp_tcc/leopards1/2025/08/28','/srv/ftp_tcc/leopards1/2025/08/29']
|
||||
|
||||
threshold = 0.10
|
||||
folder_scores = ES.calculate_embedding_score_in_folders( tuple(c_dirs), threshold = threshold, query = query )
|
||||
folder_scores['breaks'] = ES.add_breaks_between_videos(folder_scores)
|
||||
# %%
|
||||
target_tstamp = 1756332686.5805347
|
||||
|
||||
|
||||
# Find the video whose (start_time, end_time) window contains the target
# timestamp; when windows overlap, the last matching entry in list order wins.
matching_file = None
for video_file in folder_scores['videos']:
    start_time = video_file['start_time']
    end_time = video_file['end_time']

    if target_tstamp > start_time and target_tstamp < end_time:
        matching_file = video_file

if matching_file is not None:
    # Bug fix: read from matching_file, not the loop leftovers video_file /
    # start_time (those point at the *last* video scanned, not the match).
    # Also fixes the garbled `pelse:` -> `else:` below.
    fname = matching_file['file_name']
    offset = target_tstamp - matching_file['start_time']
else:
    fname = 'None Found'
    offset = -1

web_name = os.path.basename(fname)
|
||||
# %%
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import embed_scores as ES
|
||||
|
||||
result = ES.get_matching_file_for_tstamp(target_tstamp + 500, folder_scores)
|
||||
print(result)
|
||||
# %%
|
||||
|
||||
import requests
# Pull the precomputed score payload from the local search service.
folder_scores = requests.get('http://192.168.1.242:5004/videos.json').json()
# Fix: the original line was truncated mid-call (`print(len(`); completed to
# match the sibling cell below.
print(len(folder_scores['videos']))
|
||||
|
||||
# %%
|
||||
folder_scores = requests.get('http://192.168.1.242:5004/videos.json', params={'threshold':0.09}).json()
|
||||
print(len(folder_scores['videos']))
|
||||
# %%
|
||||
|
||||
# Downsample each video's (time, score) series with LTTB so the frontend
# plots fewer points while preserving the curve's shape.
new_folder_scores = folder_scores.copy()
import lttb
min_rows = 15   # never reduce a series below this many points
factor = 0.1    # otherwise keep ~10% of the original points
for x in new_folder_scores['videos']:
    # Shape (2, n): row 0 = timestamps, row 1 = scores.
    data = np.asarray( [x['embed_scores']['time'], x['embed_scores']['score']])
    amt = max(min_rows, int(factor*data.shape[1]))

    if data.shape[1] > amt:
        # lttb expects (n, 2) sample rows, hence the transpose.
        sampled = lttb.downsample(data.T, amt)
    else:
        sampled = data.T

    # NOTE(review): `time`/`scores` are computed but never written back into
    # new_folder_scores -- looks like unfinished scratch; confirm intent.
    time = sampled[:,0].tolist()
    scores = sampled[:,1].tolist()
|
||||
|
||||
|
||||
# %%
|
||||
|
||||
import pickle
|
||||
cache_file_loc = '/srv/ftp_tcc/leopards1/2025/09/09/embedding_scores@0.1@de376b3b6e90315477571ef6e82e841c.pkl'
|
||||
c_dir = os.path.dirname(cache_file_loc)
|
||||
|
||||
|
||||
|
||||
# %%
|
||||
with open(cache_file_loc,'rb') as f:
|
||||
video_json_info = pickle.load(f)
|
||||
|
||||
|
||||
files_in_cache = {os.path.splitext(os.path.basename(x['file_name']))[0] for x in video_json_info}
|
||||
lsd_dir = os.listdir(c_dir)
|
||||
files_on_disk = {x.split('.')[0] for x in lsd_dir if x.endswith('oclip_embeds.npz')}
|
||||
print(len(files_on_disk), len(files_in_cache))
|
||||
|
||||
|
||||
|
||||
# %%
# Fix: the cell marker was garbled to `p# %%` (a stray leading character),
# which is a syntax error.
import embed_scores as ES
a_mov = '/srv/ftp_tcc/leopards1/2025/09/09/Leopards1_00_20250909045221.mp4'
|
||||
Submodule SearchUtil deleted from ac52bd43fb
162
VectorService/util/.gitignore
vendored
Normal file
162
VectorService/util/.gitignore
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
|
||||
### Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
11
VectorService/util/CommonCode.code-workspace
Normal file
11
VectorService/util/CommonCode.code-workspace
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"folders": [
|
||||
{
|
||||
"path": "../../../../../Seafile/Designs/Code/Python/CommonCode"
|
||||
},
|
||||
{
|
||||
"path": "../.."
|
||||
}
|
||||
],
|
||||
"settings": {}
|
||||
}
|
||||
339
VectorService/util/embed_scores.py
Normal file
339
VectorService/util/embed_scores.py
Normal file
@@ -0,0 +1,339 @@
|
||||
from CommonCode.video_meta import FTPVideo
|
||||
from CommonCode.settings import get_logger
|
||||
import logging
|
||||
import json
|
||||
import datetime as dt
|
||||
import functools
|
||||
import requests
|
||||
import numpy as np
|
||||
from pqdm.processes import pqdm
|
||||
from multiprocessing import Pool
|
||||
import os
|
||||
import lttb
|
||||
import pickle
|
||||
import redis
|
||||
from hashlib import md5
|
||||
|
||||
|
||||
r = redis.Redis(host='localhost', port=6379, db=15)
|
||||
|
||||
logger = get_logger(__name__,'/var/log/vector_search_logs/util_embed_scores', stdout=True, systemd=False, level = logging.INFO)
|
||||
|
||||
|
||||
def get_matching_file_for_tstamp(target_tstamp, folder_scores):
    """Locate the video whose time window contains ``target_tstamp``.

    Scans every entry in ``folder_scores['videos']``; when several windows
    overlap the timestamp, the last one in list order wins.

    Returns a dict with ``full_path`` (absolute file name), ``path`` (the
    web-facing 'media/<basename>' path) and ``timeoffset`` (seconds into the
    clip), or a 'None Found' placeholder with offset -1 when nothing matches.
    """
    hit = None
    for entry in folder_scores['videos']:
        # Strict comparisons: a timestamp exactly on a boundary matches nothing.
        if entry['start_time'] < target_tstamp < entry['end_time']:
            hit = entry

    if hit is None:
        fname = 'None Found'
        offset = -1
    else:
        fname = hit['file_name']
        offset = target_tstamp - hit['start_time']

    web_name = 'media/' + os.path.basename(fname)
    return dict(full_path=fname, path=web_name, timeoffset=offset)
|
||||
|
||||
|
||||
def get_vec_rep_file_loc(c_dir):
    """Return the path of the cached vector-representation archive for ``c_dir``."""
    return os.path.join(c_dir, 'vec_rep.npz')
|
||||
|
||||
def get_vector_representation(c_dir, force_compute = False, redis_key = 'compute_log'):
    # Build (or load from the on-disk cache) the stacked embedding matrix for
    # every non-reduced .mp4 under c_dir, plus parallel arrays mapping each
    # embedding row back to its owning video and frame timestamp.
    # Progress/lifecycle messages are pushed onto the Redis list `redis_key`.
    message = {'task':'VECTOR_CALC_IN_FOLDER_START', 'when': str(c_dir), 'time': dt.datetime.now().timestamp()}
    r.rpush(redis_key, json.dumps(message))

    # Fast path: reuse the cached vec_rep.npz unless the caller forces a rebuild.
    vec_rep_file = get_vec_rep_file_loc(c_dir)
    if os.path.exists(vec_rep_file) and not force_compute:
        try:
            result = dict(np.load(vec_rep_file))
            message = {'task':'VECTOR_CALC_IN_FOLDER_DONE', 'when': str(c_dir), 'time': dt.datetime.now().timestamp(), 'precomputed':True}
            r.rpush(redis_key, json.dumps(message))
            return result
        except:
            # Cache file unreadable/corrupt: delete it and recompute below.
            os.remove(vec_rep_file)

    # Collect candidate videos, skipping transcoded *_reduced variants.
    ff = list()
    for root, dirs, files in os.walk(c_dir):
        for f in files:
            if f.endswith('.mp4') and '_reduced' not in f:
                ff.append(os.path.join(root, f))

    videos = list()
    for x in ff:
        # NOTE(review): FTPVideo is constructed twice per file and `cvid` is
        # unused -- looks like leftover scratch; confirm before cleaning up.
        cvid = FTPVideo(x)
        videos.append(FTPVideo(x))

    # FTPVideo presumably orders chronologically -- TODO confirm its ordering.
    sorted_videos = sorted(videos)

    all_cat = list()      # per-video normalized embedding matrices
    all_idx = list()      # per-row index of the owning video (dense, gap-free)
    all_source = list()   # one source path per video that had embeddings
    all_tstamps = list()  # per-row frame timestamps (epoch seconds)
    enu = 0               # counts only videos whose embeddings loaded
    for idx, x in enumerate(sorted_videos):

        try:
            hh = x.embeddings
        except Exception as e:
            # Video without a readable embedding archive: skip it.
            hh = None

        if hh is not None:
            n_emb = FTPVideo.vec_norm(hh['embeds'])
            all_cat.append(n_emb)
            all_idx.append( enu * np.ones(n_emb.shape[0], dtype=np.int64) )
            all_source.append(x.real_path)
            all_tstamps.append( [x.timestamp() for x in hh['frame_time']])
            enu +=1

        # Progress bump after every video, including skipped ones.
        message = {'task':'VECTOR_CALC_IN_FOLDER_BUMP', 'progress': idx+1, 'how_many': len(sorted_videos), 'time': dt.datetime.now().timestamp()}
        r.rpush(redis_key, json.dumps(message))

    if len(all_cat) == 0:
        # NOTE(review): returns a list here but a dict on success -- callers
        # that index the result by key will fail on an empty folder; confirm.
        return []
    all_embeds = np.vstack(all_cat)
    all_embeds = FTPVideo.vec_norm(all_embeds)
    all_idces = np.hstack(all_idx)
    all_times = np.hstack(all_tstamps)

    # Persist the representation for the fast path above.
    np.savez(vec_rep_file, embeds = all_embeds, idces= all_idces, timestamps = all_times, source_files = all_source)
    # NOTE(review): this DONE message is built but never pushed to Redis --
    # likely a missing r.rpush(redis_key, json.dumps(message)); confirm.
    message = {'task':'VECTOR_CALC_IN_FOLDER_DONE', 'when': str(c_dir), 'time': dt.datetime.now().timestamp()}
    return dict( embeds = all_embeds, idces= all_idces, timestamps = all_times, source_files = all_source)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def get_scores_embedding_c_dir(c_dir, query_vector, redis_key = 'compute_log'):
    """Score every cached frame embedding in `c_dir` against a query vector.

    Loads (or computes) the folder's stacked embedding matrix and returns the
    dot-product similarity of `query_vector` with each frame embedding, as a
    1-D array of scores.
    """
    rep = get_vector_representation(c_dir, redis_key=redis_key)
    scores = query_vector @ rep['embeds'].T
    return scores.squeeze()
|
||||
|
||||
@functools.lru_cache
def get_query_vector(query):
    """Encode free-text `query` into a normalized embedding row vector.

    Calls the external encoder service and L2-normalizes the result with
    FTPVideo.vec_norm. Results are memoized per process via lru_cache, so
    repeated queries hit the network only once.

    Returns a numpy array of shape (1, dim).
    Raises requests exceptions on network failure (lru_cache does not cache
    exceptions, so a failed call will be retried on the next invocation).
    """
    # TODO(review): encoder endpoint is hard-coded; consider moving to config.
    # Fix: add a timeout so a hung encoder service cannot block callers
    # (and the multiprocessing workers that depend on this) forever.
    vec_form = requests.get(
        'http://192.168.1.242:53004/encode',
        params={'query': query},
        timeout=30,
    ).json()['vector'][0]
    vec_search = np.asarray(vec_form)
    query_vector = FTPVideo.vec_norm(vec_search[None, :])
    return query_vector
|
||||
|
||||
|
||||
|
||||
def calculate_embedding_score_in_folders(c_dirs, threshold, query = None, query_vector = None, redis_key = 'compute_log'):
    """Score several folders against a query in parallel and merge the hits.

    Fans out `calculate_embedding_score_in_folder` over `c_dirs` with a
    process pool and concatenates the per-folder 'videos' lists.

    Args:
        c_dirs: iterable of folder paths to score.
        threshold: minimum similarity score for a frame to count as a match.
        query: free-text query, used only when `query_vector` is None.
        query_vector: precomputed (1, dim) query embedding; computed from
            `query` when not supplied.
        redis_key: redis list used for progress messages.

    Returns:
        {'videos': [...]} with all matching segments across folders.
    """
    result_list = list()

    # BUG FIX: the original unconditionally reset query_vector to None here,
    # which discarded a caller-supplied vector and made the parameter dead.
    if query_vector is None:
        query_vector = get_query_vector(query)

    # Pass the resolved vector to the workers so each process does not have
    # to re-contact the encoder service (the original passed None here).
    args = [(x, threshold, query, query_vector, logger, redis_key) for x in c_dirs]

    logger.info(f"CALCULATING FOR {args}")
    with Pool(processes=8) as pool:
        out = pool.starmap(calculate_embedding_score_in_folder, args)
    logger.info(f"DONE CALCULATING FOR {args}")

    for x in out:
        try:
            result_list.extend(x['videos'])
        except Exception as e:
            # Folders that resolved to nothing return [] instead of a dict;
            # best-effort skip, matching the original behavior.
            print(e, x)

    return {'videos': result_list}
|
||||
|
||||
|
||||
def collapse_scores_to_maxmin_avg(folder_scores):
    """Collapse each video's per-frame score list to a compact summary.

    For every entry in folder_scores['videos'], replaces
    embed_scores['score'] with [min_score, max_score, time_of_max, time_of_min]
    and embed_scores['time'] with the maximum (last) relative frame time.

    Returns a new list; the input `folder_scores` is left unmodified.
    (The original used a shallow `.copy()`, which shared the nested
    'embed_scores' dict and mutated the caller's data in place.)
    """
    result = list()
    for c_data in folder_scores['videos']:
        scores = c_data['embed_scores']['score']
        times = c_data['embed_scores']['time']

        if not scores:
            # Nothing to collapse; skip rather than crash on max()/min().
            continue

        max_score = max(scores)
        min_score = min(scores)
        # index() returns the FIRST occurrence, matching original behavior.
        max_score_time = times[scores.index(max_score)]
        min_score_time = times[scores.index(min_score)]

        # Build fresh dicts so the nested 'embed_scores' is not shared with
        # (and mutated through) the caller's input.
        new_d = dict(c_data)
        new_d['embed_scores'] = dict(c_data['embed_scores'])
        new_d['embed_scores']['score'] = [min_score, max_score, max_score_time, min_score_time]
        new_d['embed_scores']['time'] = max(times)
        result.append(new_d)

    return result
|
||||
# c_data = {'file_name': str(s_file), 'start_time':start_time, 'end_time':end_time, 'embed_scores':{'time':frame_time, 'score':embed_scores}}
|
||||
# video_json_info.append(c_data)
|
||||
|
||||
# to_write = {'source_files': vec_rep['source_files'], 'videos': video_json_info}
|
||||
# with open(cache_file_loc, 'wb') as f:
|
||||
# logger.info(f"WRITING EMBEDDING SCORE TO CACHE {cache_file_loc}")
|
||||
# pickle.dump(to_write, f)
|
||||
|
||||
|
||||
def calculate_embedding_score_in_folder(og_dir, threshold, query = None, query_vector = None, logger = logger, redis_key = 'compute_log'):
    """Score one folder of videos against a query and cache the result.

    Resolves `og_dir` against known mount remaps, loads (or builds) the
    folder's stacked frame embeddings, keeps videos whose frames score above
    `threshold`, and writes the result to a per-(threshold, query) pickle
    cache inside the folder.

    Returns:
        [] when no candidate directory exists on disk; otherwise a dict
        {'source_files': [...], 'videos': [{'file_name', 'start_time',
        'end_time', 'embed_scores': {'time', 'score'}}, ...]} — either loaded
        from the pickle cache or freshly computed.

    Side effects: pushes START/DONE progress messages onto the `redis_key`
    list, may delete stale cache/vec-rep files, and writes the pickle cache.
    """
    message = {'task':'SCORE_CALC_IN_FOLDER_START', 'when': str(og_dir), 'time': dt.datetime.now().timestamp()}
    r.rpush(redis_key, json.dumps(message))

    if query_vector is None:
        query_vector = get_query_vector(query)

    # The same day of footage can live under several mounts; probe each
    # remapped location and use the first that exists.
    candidate_dirs = list()
    candidate_dirs.append(og_dir)
    candidate_dirs.append(og_dir.replace('/srv/ftp_tcc','/mnt/hdd_24tb_1/videos/ftp'))
    candidate_dirs.append(og_dir.replace('/srv/ftp','/mnt/hdd_24tb_1/videos/ftp'))

    c_dir = None
    for candidate in candidate_dirs:
        if os.path.exists(candidate):
            c_dir = candidate
            break
    if c_dir is None:
        # Nothing on disk for this folder; NOTE this returns a list while the
        # normal path returns a dict — callers handle both.
        return []

    # Cache key combines the threshold and a digest of the query vector.
    # md5() hashes the ndarray's raw buffer (ndarray supports the buffer
    # protocol), so identical vectors map to the same cache file.
    vec_cache_str = md5(query_vector).hexdigest()
    cache_file_loc = os.path.join(c_dir, 'embedding_scores@'+str(threshold)+'@'+vec_cache_str+'.pkl')

    if os.path.exists(cache_file_loc):
        logger.info(f"TRYING TO LOAD CACHE {cache_file_loc}")
        try:
            with open(cache_file_loc, 'rb') as f:
                video_json_info = pickle.load(f)
                # Cache is valid only if the set of per-video embedding files
                # on disk matches the set recorded when the cache was written.
                files_in_cache = {os.path.splitext(os.path.basename(x))[0] for x in video_json_info.get('source_files',[])}
                lsd_dir = os.listdir(c_dir)
                files_on_disk = {x.split(".")[0] for x in lsd_dir if x.endswith('oclip_embeds.npz')}

                if files_on_disk == files_in_cache:
                    logger.info(f"LOADED EMBEDDING SCORE FROM CACHE {cache_file_loc}")
                    message = {'task':'SCORE_CALC_IN_FOLDER_DONE', 'when': str(c_dir), 'time': dt.datetime.now().timestamp(), 'precomputed': True}
                    r.rpush(redis_key, json.dumps(message))
                    return video_json_info
                else:
                    # New/removed videos since the cache was written: drop the
                    # aggregated vector-rep file so it gets rebuilt below.
                    logger.info(f"CACHE FILE IS OLD, DELETING VEC REP FILE AND RECREATING {cache_file_loc}")
                    os.remove( get_vec_rep_file_loc(c_dir))
        except Exception as e:
            # Corrupt pickle (or failed cleanup): discard and recompute.
            logger.info(f"CACHE FILE IS CORRUPT, RECREATING {cache_file_loc} {e}")
            os.remove(cache_file_loc)

            pass

    vec_rep = get_vector_representation(c_dir, redis_key = redis_key)
    # query_vector is passed as a tuple — presumably because the scorer is
    # memoized and needs hashable args; TODO confirm at the definition.
    query_scores = get_scores_embedding_c_dir(c_dir, tuple(query_vector.tolist()[0]), redis_key = redis_key)

    video_json_info = list()
    # Frames above threshold -> the set of video indices that match at all.
    idces_keep = np.where(query_scores > threshold)[0]

    video_id = vec_rep['idces'][idces_keep]
    videos_that_match = np.unique(video_id)

    # Gather the timestamps of all frames belonging to matching videos, then
    # split them back into per-video runs (idces are sorted per video, so a
    # change in the index value marks a video boundary).
    id_extract_video_level = np.where(np.isin(vec_rep['idces'], videos_that_match))[0]
    idces_split = np.where(np.diff(vec_rep['idces'][id_extract_video_level]) !=0)[0] + 1
    subset_timestampsF = np.split(vec_rep['timestamps'][id_extract_video_level], idces_split)

    for subset_t in subset_timestampsF:
        if len(subset_t) == 0:
            continue

        # Expand each matching video's time span to cover every video that
        # has frames strictly inside that window (neighboring clips overlap).
        min_t = min(subset_t)
        max_t = max(subset_t)
        idces_curr = np.where(np.logical_and(vec_rep['timestamps'] > min_t , vec_rep['timestamps'] < max_t))[0]
        if len(idces_curr) == 0:
            continue

        unq_vids = np.unique(vec_rep['idces'][idces_curr])
        subset_idx = np.where(np.isin(vec_rep['idces'],unq_vids))[0]

        subset_idces = vec_rep['idces'][subset_idx]
        subset_timestamps = vec_rep['timestamps'][subset_idx]
        subset_scores = query_scores[subset_idx]
        idx_split = np.where(np.diff(vec_rep['idces'][subset_idx]) !=0)[0]+1

        # Re-split the expanded window into per-video (file) segments.
        split_idces = np.split(subset_idces, idx_split)
        split_timestamps = np.split(subset_timestamps, idx_split)
        split_scores = np.split(subset_scores, idx_split)
        split_files = [vec_rep['source_files'][x[0]] for x in split_idces]

        for s_file, s_scores, s_tstamps, s_idces in zip(split_files, split_scores, split_timestamps, split_idces):
            start_time = float(min(s_tstamps))
            end_time = float(max(s_tstamps))

            # Frame times are stored relative to the segment start.
            frame_time = (s_tstamps - start_time).tolist()
            embed_scores = s_scores.tolist()

            c_data = {'file_name': str(s_file), 'start_time':start_time, 'end_time':end_time, 'embed_scores':{'time':frame_time, 'score':embed_scores}}
            video_json_info.append(c_data)

    message = {'task':'SCORE_CALC_IN_FOLDER_DONE', 'when': str(c_dir), 'time': dt.datetime.now().timestamp()}
    r.rpush(redis_key, json.dumps(message))
    to_write = {'source_files': vec_rep['source_files'], 'videos': video_json_info}
    with open(cache_file_loc, 'wb') as f:
        logger.info(f"WRITING EMBEDDING SCORE TO CACHE {cache_file_loc}")
        pickle.dump(to_write, f)

    return to_write
|
||||
|
||||
|
||||
def get_matching_file_given_filename(web_name, folder_scores):
    """Map a web-facing file name back to an existing on-disk path.

    Finds an entry in folder_scores['videos'] whose 'file_name' ends with
    `web_name` (the LAST match wins, matching the original behavior), then
    probes the known mount remaps and returns the first candidate path that
    exists on disk.

    Returns:
        The resolved absolute path, or None when no entry matches `web_name`
        or none of the candidate paths exist.
    """
    file_name = None
    for x in folder_scores['videos']:
        if x['file_name'].endswith(web_name):
            file_name = x['file_name']

    # BUG FIX: the original fell through with file_name=None and crashed with
    # AttributeError on .replace() when nothing matched.
    if file_name is None:
        return None

    candidate_files = list()
    candidate_files.append(file_name)
    candidate_files.append(file_name.replace('/srv/ftp_tcc','/mnt/hdd_24tb_1/videos/ftp'))
    candidate_files.append(file_name.replace('/srv/ftp','/mnt/hdd_24tb_1/videos/ftp'))

    file_name = None
    for candidate in candidate_files:
        if os.path.exists(candidate):
            file_name = candidate
            break

    return file_name
|
||||
|
||||
|
||||
|
||||
#c_dirs = ['/mnt/hdd_24tb_1/videos/ftp/leopards2/2025/08/26','/srv/ftp_tcc/leopards1/2025/08/27','/srv/ftp_tcc/leopards1/2025/08/28','/srv/ftp_tcc/leopards1/2025/08/29']
|
||||
#op = calculate_embedding_score_in_folders( tuple(c_dirs), 0.10, query = 'A cat and human')
|
||||
|
||||
def add_breaks_between_videos(op, threshold_to_split_seconds = 30*60): # 30 minutes):
    """Find large time gaps between consecutive video segments.

    Walks op['videos'] in order and, whenever the gap between one segment's
    end_time and the next segment's start_time exceeds
    `threshold_to_split_seconds`, records that gap.

    Returns a list of (end_of_previous, start_of_next) tuples.
    """
    spans = [(v['start_time'], v['end_time']) for v in op['videos']]

    gaps = list()
    for (_, prev_end), (next_start, _) in zip(spans, spans[1:]):
        if (next_start - prev_end) > threshold_to_split_seconds:
            gaps.append((prev_end, next_start))

    return gaps
|
||||
96
VectorService/vector_service.py
Normal file
96
VectorService/vector_service.py
Normal file
@@ -0,0 +1,96 @@
|
||||
from typing import Union, Optional, List
|
||||
from pydantic import BaseModel
|
||||
from fastapi import FastAPI, Request, Depends
|
||||
from CommonCode.settings import get_logger
|
||||
import logging
|
||||
from fastapi.responses import StreamingResponse
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import time
|
||||
from util import embed_scores as ES
|
||||
from fastapi_server_session import SessionManager, RedisSessionInterface, Session
|
||||
import redis
|
||||
from datetime import timedelta
|
||||
|
||||
# FastAPI application serving the vector-search HTTP endpoints.
app = FastAPI()

# Server-side sessions backed by the local Redis instance; used below to
# stash per-client folder_scores between requests.
session_manager = SessionManager(
    interface=RedisSessionInterface(redis.from_url("redis://localhost"))
)

# Service logger writing to file and stdout.
logger = get_logger(__name__,'/var/log/vector_search_logs/main_embed_scores', stdout=True, systemd=False, level = logging.INFO)
# Redis db 15 carries the progress messages pushed by the scoring workers.
r = redis.Redis(host='localhost', port=6379, db=15)
|
||||
|
||||
|
||||
class VideosPostRequest(BaseModel):
    """Request body for POST /videos.json.

    BUG FIX: the original had a trailing comma after each field default,
    which made Python parse the default as a 1-tuple (e.g.
    ("A cat and a human",)) instead of the intended scalar value.
    """
    query: str = "A cat and a human"      # free-text search query
    threshold: float = 0.10               # minimum embedding similarity to keep
    c_dirs: Optional[List[str]] = None    # folders to scan; server default when None
    task_id: str = 'compute_log'          # redis list for progress messages
|
||||
|
||||
@app.post("/videos.json")
async def videos_json(
    vpr: VideosPostRequest,
    session: Session = Depends(session_manager.use_session),
):
    """Score a set of camera folders against a text query.

    Pushes progress messages onto the `task_id` redis list, computes
    per-folder embedding scores via the ES module, annotates the result with
    long gaps ('breaks') and collapsed per-video score summaries, stores the
    whole thing in the server-side session, and returns it as JSON.
    """
    query = vpr.query
    threshold = vpr.threshold
    c_dirs = vpr.c_dirs
    task_id = vpr.task_id
    # Fall back to a hard-coded default date range when the client sends none.
    if c_dirs is None:
        c_dirs = [
            # "/mnt/hdd_24tb_1/videos/ftp/leopards2/2025/08/26",
            # "/srv/ftp_tcc/leopards1/2025/08/27",
            # "/srv/ftp_tcc/leopards1/2025/08/28",
            # "/srv/ftp_tcc/leopards1/2025/08/29",
            # "/srv/ftp_tcc/leopards1/2025/08/30",
            # "/srv/ftp_tcc/leopards1/2025/08/31",
            # "/srv/ftp_tcc/leopards1/2025/09/01",
            # "/srv/ftp_tcc/leopards1/2025/09/02",
            # "/srv/ftp_tcc/leopards1/2025/09/03",
            # "/srv/ftp_tcc/leopards1/2025/09/04",
            # "/srv/ftp_tcc/leopards1/2025/09/05",
            # "/srv/ftp_tcc/leopards1/2025/09/06",
            # "/srv/ftp_tcc/leopards1/2025/09/07",
            "/srv/ftp_tcc/leopards1/2025/09/08",
            "/srv/ftp_tcc/leopards1/2025/09/09",
            "/srv/ftp_tcc/leopards1/2025/09/10",
            "/srv/ftp_tcc/leopards1/2025/09/11",
        ]

    print(','.join([str(x) for x in c_dirs]))
    # Announce the whole batch, then each folder, on the progress channel.
    message = {'task':'SCHEDULED','when':[str(x) for x in c_dirs], 'time':time.time()}
    r.rpush(task_id, json.dumps(message))

    for x in c_dirs:
        message = {'task':'QUEUEING', 'when': str(x), 'time': time.time()}
        r.rpush(task_id, json.dumps(message))

    # tuple(c_dirs): presumably so the ES call can be memoized on hashable
    # args — TODO confirm at the ES definition.
    folder_scores = ES.calculate_embedding_score_in_folders(
        tuple(c_dirs), threshold=threshold, query=query, redis_key = task_id)
    # if p_hits != ES.calculate_embedding_score_in_folders.cache_info().hits:
    #     logger.info("FROM CACHE")
    # else:pp
    #     logger.info("COMPUTED FROM SCRATCH")

    # Annotate long gaps first, then collapse per-frame score lists into
    # [min, max, t(max), t(min)] summaries (collapse replaces 'videos').
    folder_scores["breaks"] = ES.add_breaks_between_videos(folder_scores)
    folder_scores['videos'] = ES.collapse_scores_to_maxmin_avg(folder_scores)

    # Keep the full result in the session so follow-up requests (e.g. click
    # resolution) can reuse it without recomputing.
    session["folder_scores"] = folder_scores
    return folder_scores
|
||||
|
||||
|
||||
class ClickEvent(BaseModel):
    """Client click on the score timeline."""
    # Absolute epoch timestamp (seconds) of the clicked position.
    timestamp: float
|
||||
|
||||
|
||||
class ClickResponse(BaseModel):
|
||||
path: str
|
||||
timeoffset: float
|
||||
Reference in New Issue
Block a user