remove old log and finish metrics for scrapePart

Co-authored-by: William Oldham <github@binaryoverload.co.uk>
mrjvs 2023-11-11 16:17:13 +01:00
parent a7bd4786f3
commit 76d715a751
4 changed files with 87 additions and 26 deletions

View file

@@ -1,8 +1,10 @@
+import { ScrapeMedia } from "@movie-web/providers";
 import { ofetch } from "ofetch";
 import { useCallback } from "react";
-import { useBackendUrl } from "@/hooks/auth/useBackendUrl";
-import { ScrapingSegment } from "@/hooks/useProviderScrape";
+import { ScrapingItems, ScrapingSegment } from "@/hooks/useProviderScrape";
+
+const metricsEndpoint = "https://backend.movie-web.app/metrics/providers";

 export type ProviderMetric = {
   tmdbId: string;
@@ -15,37 +17,93 @@ export type ProviderMetric = {
   embedId?: string;
   errorMessage?: string;
   fullError?: string;
+  hostname?: string;
 };

-export async function reportProviders(
-  url: string,
-  items: ProviderMetric[]
-): Promise<void> {
-  return ofetch("/metrics/providers", {
+export async function reportProviders(items: ProviderMetric[]): Promise<void> {
+  return ofetch(metricsEndpoint, {
     method: "POST",
     body: {
-      items,
+      items: items.map((v) => ({
+        ...v,
+        hostname: window.location.hostname,
+      })),
     },
-    baseURL: url,
   });
 }

-export function scrapSegmentToProviderMetric(
-  _segment: ScrapingSegment
-): ProviderMetric {
-  // TODO actually convert this
-  return {} as any;
-}
+const segmentStatusMap: Record<
+  ScrapingSegment["status"],
+  ProviderMetric["status"] | null
+> = {
+  success: "success",
+  notfound: "notfound",
+  failure: "failed",
+  pending: null,
+  waiting: null,
+};
+
+export function scrapeSegmentToProviderMetric(
+  media: ScrapeMedia,
+  providerId: string,
+  segment: ScrapingSegment
+): ProviderMetric | null {
+  const status = segmentStatusMap[segment.status];
+  if (!status) return null;
+
+  let episodeId: string | undefined;
+  let seasonId: string | undefined;
+  if (media.type === "show") {
+    episodeId = media.episode.tmdbId;
+    seasonId = media.season.tmdbId;
+  }
+
+  let error: undefined | Error;
+  if (segment.error instanceof Error) error = segment.error;
+
+  return {
+    status,
+    providerId,
+    title: media.title,
+    tmdbId: media.tmdbId,
+    type: media.type,
+    embedId: segment.embedId,
+    episodeId,
+    seasonId,
+    errorMessage: segment.reason ?? error?.message,
+    fullError: error
+      ? `${error.toString()}\n\n${error.stack ?? ""}`
+      : undefined,
+  };
+}
+
+export function scrapePartsToProviderMetric(
+  media: ScrapeMedia,
+  order: ScrapingItems[],
+  sources: Record<string, ScrapingSegment>
+): ProviderMetric[] {
+  const output: ProviderMetric[] = [];
+
+  order.forEach((orderItem) => {
+    const source = sources[orderItem.id];
+    orderItem.children.forEach((embedId) => {
+      const embed = sources[embedId];
+      if (!embed.embedId) return;
+      const metric = scrapeSegmentToProviderMetric(media, source.id, embed);
+      if (!metric) return;
+      output.push(metric);
+    });
+    const metric = scrapeSegmentToProviderMetric(media, source.id, source);
+    if (!metric) return;
+    output.push(metric);
+  });
+
+  return output;
+}

 export function useReportProviders() {
-  const url = useBackendUrl();
-  // TODO constant url
-  const report = useCallback(
-    (items: ProviderMetric[]) => {
-      reportProviders(url, items);
-    },
-    [url]
-  );
+  const report = useCallback((items: ProviderMetric[]) => {
+    reportProviders(items);
+  }, []);
   return { report };
 }
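Reviewer note (not part of the commit): a minimal usage sketch of the new reporting pipeline, assuming the exports shown above. The media object, segment ids, and statuses are invented for illustration; the movie shape of ScrapeMedia is assumed from @movie-web/providers, and ScrapingItems.children is assumed to be a list of embed segment ids, as implied by scrapePartsToProviderMetric.

// Sketch only: values below are illustrative, not taken from the diff.
import { ScrapeMedia } from "@movie-web/providers";
import {
  reportProviders,
  scrapePartsToProviderMetric,
} from "@/backend/helpers/report";
import { ScrapingItems, ScrapingSegment } from "@/hooks/useProviderScrape";

// A movie-shaped ScrapeMedia (field set assumed from @movie-web/providers).
const media: ScrapeMedia = {
  type: "movie",
  title: "Example Movie",
  releaseYear: 2001,
  tmdbId: "12345",
};

// One source segment ("sourceA") that ran one embed segment ("embedB").
// Only the ScrapingSegment fields visible in this commit are filled in; any
// other required fields of the real interface would need values too.
const sources: Record<string, ScrapingSegment> = {
  sourceA: { id: "sourceA", name: "Source A", status: "failure", reason: "no stream found" },
  embedB: { id: "embedB", embedId: "embedB", name: "Embed B", status: "notfound" },
};
const order: ScrapingItems[] = [{ id: "sourceA", children: ["embedB"] }];

// pending/waiting segments map to null in segmentStatusMap and are skipped;
// each remaining segment becomes one ProviderMetric, and reportProviders
// attaches window.location.hostname before POSTing to metricsEndpoint
// (so this only works in a browser context).
const metrics = scrapePartsToProviderMetric(media, order, sources);
void reportProviders(metrics);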

View file

@@ -11,6 +11,7 @@ export interface ScrapingItems {
 export interface ScrapingSegment {
   name: string;
   id: string;
+  embedId?: string;
   status: "failure" | "pending" | "notfound" | "success" | "waiting";
   reason?: string;
   error?: unknown;
@@ -73,6 +74,7 @@ export function useScrape() {
       const source = providers.getMetadata(v.embedScraperId);
       if (!source) throw new Error("invalid source id");
       const out: ScrapingSegment = {
+        embedId: v.embedScraperId,
         name: source.name,
         id: v.id,
         status: "waiting",

View file

@ -4,7 +4,7 @@ import { useEffect, useRef } from "react";
import type { AsyncReturnType } from "type-fest"; import type { AsyncReturnType } from "type-fest";
import { import {
scrapSegmentToProviderMetric, scrapePartsToProviderMetric,
useReportProviders, useReportProviders,
} from "@/backend/helpers/report"; } from "@/backend/helpers/report";
import { usePlayer } from "@/components/player/hooks/usePlayer"; import { usePlayer } from "@/components/player/hooks/usePlayer";
@@ -64,8 +64,10 @@ export function ScrapingPart(props: ScrapingProps) {
         resultRef.current.sourceOrder
       );
       report(
-        Object.values(resultRef.current.sources).map((v) =>
-          scrapSegmentToProviderMetric(v)
+        scrapePartsToProviderMetric(
+          props.media,
+          resultRef.current.sourceOrder,
+          resultRef.current.sources
         )
       );
       props.onGetStream?.(output);

View file

@@ -73,6 +73,5 @@ export function migrateV4Videos(old: WatchedStoreData) {
     }
   }

-  console.log(newItems);
   return newItems;
 }