This commit is contained in:
SA 2025-04-05 22:22:52 +02:00
parent 3a7b9b0a3b
commit b0a78ecaa7
29 changed files with 850 additions and 594 deletions

View File

@ -7,7 +7,7 @@ export const apiServerBaseURL = configuration.webserver.base_url;
export const sentryDSN = configuration.sentry_dsn_js;
export const apiServerPort = !apiServerBaseURL
? undefined
: configuration.webserver.port;
: configuration.webserver?.port;
export const siteName = configuration.webserver.ui.home.site_name || "Kemono";
export const homeBackgroundImage =
configuration.webserver.ui.home.home_background_image;

View File

@ -56,7 +56,7 @@ export async function fetchPopularPosts(
const path = `/posts/popular`;
const params = new URLSearchParams();
if (date) {
if (date && scale !== "recent") {
params.set("date", date);
}

View File

@ -1,5 +1,5 @@
import clsx from "clsx";
import { ReactNode } from "react";
import { MouseEventHandler, ReactNode } from "react";
import { NavLink, NavLinkProps } from "react-router";
export interface IFancyLinkProps extends IBaseLinkProps {
@ -34,6 +34,7 @@ interface IBaseLinkProps {
url: string;
className?: string;
children?: ReactNode;
onClick?: MouseEventHandler<HTMLAnchorElement>;
}
export function FancyLink({

View File

@ -1,5 +0,0 @@
.block {
text-align: center;
padding-top: 1em;
padding-bottom: 0.5em;
}

View File

@ -1,37 +0,0 @@
import { FormRouter, FormSection } from "#components/forms";
import * as styles from "./file_hash_search.module.scss";
interface IProps {
id: string;
hash?: string;
}
export function FileSearchForm({ id, hash }: IProps) {
return (
<FormRouter
id={id}
className={styles.block}
method="POST"
submitButton={() => <>Submit</>}
>
<FormSection>
<label htmlFor="file">File:</label>
<input id="file" type="file" name="file" />
</FormSection>
<FormSection>
<span style={{ userSelect: "none" }}>
<br />
or
<br />
</span>
</FormSection>
<FormSection>
<label htmlFor="hash">SHA256 hash:</label>
<input id="hash" type="text" name="hash" defaultValue={hash} />
</FormSection>
</FormRouter>
);
}

View File

@ -1,4 +1,3 @@
export { FileSearchForm } from "./file_hash_search";
export { ArchiveFileOverview } from "./archive-overview";
export type {
IFile,

View File

@ -100,7 +100,7 @@ export function PostBody({
{incomplete_rewards && (
<div className="post__warning" style={{ color: "red" }}>
<pre dangerouslySetInnerHTML={{ __html: incomplete_rewards }}></pre>
<pre>{incomplete_rewards}</pre>
</div>
)}

View File

@ -33,14 +33,3 @@ export function createArchiveFileURL(
return new InternalURL(path, params);
}
export function createFileSearchPageURL(hash?: string) {
const path = `/search_hash`;
const params = new URLSearchParams();
if (hash) {
params.set("hash", hash);
}
return new InternalURL(path, params);
}

View File

@ -48,7 +48,6 @@ export {
createThumbnailURL,
createFilePageURL,
createArchiveFileURL,
createFileSearchPageURL,
} from "./files";
export { createImporterStatusPageURL } from "./importer";
export {

View File

@ -0,0 +1,22 @@
import { KemonoLink } from "#components/links";
import { useNavigate } from "react-router";
import * as styles from "./errors.module.scss";
export function Error404() {
const navigate = useNavigate();
function goBack() {
navigate(-1);
}
return (
<div className={styles.errorPage}>
<h1>404</h1>
<div>The page you are looking for does not exist.</div>
<div>
<KemonoLink url="/">Home</KemonoLink>{" | "}
<KemonoLink url="#" onClick={goBack}>Go Back</KemonoLink>
</div>
</div>
)
}

View File

@ -0,0 +1,13 @@
.errorPage {
width: fit-content;
margin-left: auto;
margin-right: auto;
margin-top: 10vh;
text-align: center;
h1 {
margin-bottom: 5vh;
font-size: 500%;
user-select: none;
}
}

View File

@ -0,0 +1,5 @@
.error {
color: var(--negative-colour1-primary);
text-align: center;
margin-bottom: 1em;
}

View File

@ -1,34 +1,245 @@
import clsx from "clsx";
import { useState } from "react";
import { ActionFunctionArgs, redirect } from "react-router";
import { FormEvent, useState } from "react";
import { redirect, useNavigate } from "react-router";
import { PAYSITE_LIST, SITE_NAME } from "#env/env-vars";
import { createImporterStatusPageURL } from "#lib/urls";
import { fetchCreateImport } from "#api/imports";
import { useClient } from "#hooks";
import { PageSkeleton } from "#components/pages";
import { FormRouter, FormSection } from "#components/forms";
import { paysites } from "#entities/paysites";
import { isRegisteredAccount } from "#entities/account";
const dmLookup = ["patreon", "fansly"];
import * as styles from "./importer_list.module.scss";
const MAX_LENGTH = 1024;
function titlize(s: string) {
return s.replace("_", " ").replace(/\b\w+/g, text => text.charAt(0).toUpperCase() + text.substring(1).toLowerCase());
}
interface Input {
name: string;
label?: string;
hint?: string;
default?: () => string;
}
interface PaysiteForm {
name?: string;
inputs: Array<Input>;
// Returns an error message, or undefined if no errors.
validate: (...args: Array<any>) => string | undefined;
includeDMs?: boolean;
}
const PAYSITES: { [name: string]: PaysiteForm } = {
patreon: {
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
if (session_key.length != 43) {
return `Invalid key: Expected 43 characters, got ${session_key.length}`;
}
},
includeDMs: true,
},
fanbox: {
name: "Pixiv Fanbox",
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
if (!session_key.match(/^\d+_\w+$/) || session_key.length > MAX_LENGTH) {
return "Invalid key.";
}
},
},
afdian: {
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
if (session_key.length > MAX_LENGTH) {
return "Key is too long.";
}
},
},
boosty: {
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
try {
JSON.parse(decodeURIComponent(session_key));
} catch {
return "Invalid key: Expected valid JSON.";
}
},
},
discord: {
inputs: [
{ name: "session_key", label: "Token" },
{ name: "channel_ids", label: "Channel IDs", hint: "Separate with commas." },
],
validate({ session_key, channel_ids }) {
if (!session_key.match(/^(mfa\.[a-z0-9_-]{20,}|[a-z0-9_-]{23,28}\.[a-z0-9_-]{6,7}\.[a-z0-9_-]{27})$/)) {
return "Invalid token format.";
}
for (const id of channel_ids.split(/\s*,\s*/)) {
if (id && !/^\d+$/.test(id)) {
return `${id} is not a valid channel ID.`;
}
}
},
},
dlsite: {
name: "DLsite",
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
if (session_key.length > MAX_LENGTH) {
return "Key is too long.";
}
},
},
fantia: {
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
if (![32, 64].includes(session_key.length)) {
return "Invalid key: Expected 32 or 64 characters.";
}
},
},
gumroad: {
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
if (session_key.length < 200 || session_key.length > MAX_LENGTH) {
return `Invalid key: Expected 200 to ${MAX_LENGTH} characters.`;
}
},
},
subscribestar: {
name: "SubscribeStar",
inputs: [
{ name: "session_key" },
],
validate({ session_key }) {
if (session_key.length > MAX_LENGTH) {
return "Key is too long.";
}
},
},
onlyfans: {
name: "OnlyFans",
inputs: [
{ name: "session_key", hint: "Can be found in cookies -> sess." },
{ name: "auth_id", label: "User ID", hint: "Can be found in cookies -> auth_id." },
{ name: "x-bc", label: "BC Token", hint: "Can be found in local storage -> bcTokenSha. Paste <code style=\"user-select: all\">localStorage.bcTokenSha</code> into the console for easy access." },
{
name: "user_agent",
label: "User-Agent",
hint: "This needs to be set to the User-Agent of the last device that logged into your OnlyFans account; leave it as the default value if you are on it right now.",
default: () => navigator.userAgent,
},
],
validate({ session_key, auth_id: user_id, "x-bc": bc_token, user_agent }) {
if (session_key.length > MAX_LENGTH) {
return "Key is too long.";
}
if (!/^\d+$/.test(user_id)) {
return "User ID must consist of only digits.";
}
if (!bc_token.match(/^[a-f0-9]{40}$/)) {
return "Invalid BC token (expected 40 hexadecimal characters).";
}
if (!/^[\x00-\x7F]*$/.test(user_agent)) {
return "Invalid User-Agent (contains non-ASCII characters).";
}
},
},
fansly: {
inputs: [
{
name: "session_key",
hint: `
Copy the following string and enter it into the browser Console,
accessible by pressing F12.
<core style="user-select: all">btoa(JSON.stringify({...JSON.parse(localStorage?.session_active_session||'{}'),device:localStorage?.device_device_id}))</code>
`
},
],
validate({ session_key }) {
if (session_key.length == 71 && !/^[A-Za-z0-9]{71}$/.test(session_key)) {
return "The key doesn't match the required pattern.";
}
try {
if (!JSON.parse(atob(session_key))?.token) {
return "Token not found in JSON.";
}
} catch {
return "Key is not valid JSON."
}
},
includeDMs: true,
},
candfans: {
name: "CandFans",
inputs: [
{ name: "session_key", hint: "On CandFans page, Press F12 -> \"Application\" tab (check >> if its hidden) -> Storage: Cookies -> candfans.jp -> secure_candfans_session value." },
],
validate({ session_key }) {
try {
let keys = Object.keys(JSON.parse(atob(decodeURIComponent(session_key))));
if (!["mac", "iv", "tag", "value"].every(key => keys.includes(key))) {
return "The key does not contain the appropriate values.";
}
} catch {
return "The key was not decodable.";
}
},
}
}
/**
* TODO: split into separate pages per service
*/
export function ImporterPage() {
const isClient = useClient();
const [selectedService, changeSelectedService] = useState(PAYSITE_LIST[0]);
const [error, setError] = useState<string | undefined>(undefined);
const navigate = useNavigate();
const title = "Import paywall posts/comments/DMs";
const heading = "Import from Paysite";
async function onSubmit(event: FormEvent) {
event.preventDefault();
setError(undefined);
let form = event.target as HTMLFormElement;
let inputs = form.querySelectorAll("input");
let args: {[key: string]: string} = { service: selectedService };
inputs.forEach(el => {
if (el.type == "checkbox") {
args[el.name] = el.checked ? "1" : "0";
} else {
args[el.name] = el.value.trim();
}
});
let error = PAYSITES[selectedService].validate(args);
if (error) {
setError(error);
} else {
try {
let { import_id } = await fetchCreateImport(args as any);
await navigate(`/importer/status/${import_id}`);
} catch (resp: any) {
setError(resp.message)
}
}
}
return (
<PageSkeleton name="importer" title={title} heading={heading}>
<FormRouter
id="import-list"
className="form form--bigger"
method="POST"
submitButton={(state) => "Submit key"}
>
<form id="import-list" className="form form--bigger" onSubmit={onSubmit}>
<div className="form__section">
<label htmlFor="service" className="form__label">
Paysite:
@ -41,199 +252,73 @@ export function ImporterPage() {
onChange={(event) => changeSelectedService(event.target.value)}
>
{PAYSITE_LIST.map((entry, index) => {
const paySite = paysites[entry];
const paysite = PAYSITES[entry];
if (!paysite) {
console.error(`Requested paysite '${entry}' does not exist in the table`);
return;
}
return (
<option key={index} className="form__option" value={entry}>
{paySite.title}
{paysite.name ?? titlize(entry)}
</option>
);
})}
</select>
</div>
<FormSection>
<label className="form__label" htmlFor="session-key">
Session key:
</label>
{PAYSITES[selectedService].inputs.map((input, index) => {
return (
<div className="form__section" key={input.name}>
<label className="form__label" htmlFor={input.name}>{input.label || titlize(input.name)}:</label>
<input
id="session-key"
className="form__input"
id={input.name}
type="text"
name="session_key"
name={input.name}
className="form__input"
autoComplete="off"
autoCorrect="off"
autoCapitalize="off"
spellCheck="false"
maxLength={1024}
required
defaultValue={input.default?.()}
/>
{index === 0 && (
<small className="form__subtitle other__notes">
<a href="/importer/tutorial">Learn how to get your session key.</a>
</small>
<small
className="form__subtitle fansly__notes"
hidden={selectedService !== "fansly" ? true : false}
>
Session key.
<br />
On desktop/mobile right click copy the link of this URL:
<br />
<pre>{`javascript:prompt('Fansly Token', btoa(JSON.stringify({...JSON.parse(localStorage?.session_active_session),device:localStorage?.device_device_id})))`}</pre>
then on the Fansly webpage enter{" "}
<span style={{ color: "white", fontWeight: "bold" }}>
javascript:
</span>{" "}
into your browser url bar and then paste the previously copied
string. This will output a result that is your session key
<br />
<br />
Alternative method: <br />
Copy the code string below and post this into the browser console.
(Open with keyboard F12 or right click {">"} Inspect Element)
<br />
Code:{" "}
<code
style={{ color: "#b5b5b5", userSelect: "all" }}
>{`btoa(JSON.stringify({...JSON.parse(localStorage?.session_active_session||'{}'),device:localStorage?.device_device_id}))`}</code>
</small>
<small
className="form__subtitle onlyfans__notes"
hidden={selectedService !== "onlyfans"}
>
Session key. Can be found in Cookies -{">"} sess.
</small>
<small
className="form__subtitle fanbox__notes"
hidden={selectedService !== "fanbox"}
>
On Fanbox page, Press F12 -{">"} "Application" tab (check {">"}
{">"} if its hidden) -{">"} Storage: Cookies -{">"} fanbox.cc -{">"}{" "}
FANBOXSESSID value <br />{" "}
<a href="/importer/tutorial_fanbox">
Detailed steps in how to get your fanbox session ID.
</a>
</small>
<small
className="form__subtitle candfans__notes"
hidden={selectedService !== "candfans"}
>
On CandFans page, Press F12 -{">"} "Application" tab (check {">"}
{">"} if its hidden) -{">"} Storage: Cookies -{">"} candfans.jp -
{">"} secure_candfans_session value. <br />{" "}
<a href="/importer/tutorial">Detailed steps.</a>{" "}
</small>
</FormSection>
{selectedService !== "onlyfans" ? undefined : (
<div id="onlyfans-section">
<div className="form__section">
<input
id="auth-id"
className="form__input"
type="text"
name="auth_id"
placeholder="User ID"
autoComplete="off"
autoCorrect="off"
autoCapitalize="off"
spellCheck="false"
maxLength={1024}
pattern="[0-9]{3,12}"
required
/>
<small className="form__subtitle">
Your user ID. Can be found in Cookies -{">"} auth_id.
</small>
</div>
<div className="form__section">
<input
id="x-bc"
className="form__input"
type="text"
name="x-bc"
placeholder="BC Token"
autoComplete="off"
autoCorrect="off"
autoCapitalize="off"
spellCheck="false"
maxLength={1024}
pattern="[0-9a-f]{18,50}"
required
/>
<small className="form__subtitle">
BC token. Can be found in Local Storage -{">"} bcTokenSha, or
the headers of an XHR request -{">"} x-bc.
<br />
Paste this on the console{" "}
<code style={{ color: "#b5b5b5", userSelect: "all" }}>
localStorage.bcTokenSha
</code>
</small>
</div>
<div className="form__section">
<label htmlFor="user-agent" className="form__label">
User Agent:
</label>
<input
id="user-agent"
className="form__input"
type="text"
name="user_agent"
placeholder="User Agent"
autoComplete="off"
autoCorrect="off"
autoCapitalize="off"
spellCheck="false"
minLength={10}
maxLength={1024}
required
defaultValue={!isClient ? undefined : navigator.userAgent}
/>
<small className="form__subtitle">
This needs to be set to the{" "}
<a
href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent"
target="_blank"
>
User-Agent
</a>
of the last device that logged into your OnlyFans account; leave
it as the default value if you are on it right now.
</small>
</div>
</div>
)}
{input.hint && (
<small className="form__subtitle other__notes" dangerouslySetInnerHTML={{__html: input.hint}} />
)}
</div>
)
})}
{PAYSITES[selectedService].includeDMs && (
<div
id="discord-section"
className={clsx(
"form__section",
selectedService !== "discord" && "form__section--hidden"
)}
id="dm-consent"
className="form__section form__section--checkbox"
>
<label htmlFor="channel_ids" className="form__label">
Discord channel IDs:
</label>
<input
type="text"
className="form__input"
id="channel_ids"
name="channel_ids"
autoComplete="off"
autoCorrect="off"
autoCapitalize="off"
spellCheck="false"
type="checkbox"
id="save-dms"
name="save_dms"
defaultChecked={true}
/>
<small className="form__subtitle">comma separated, no spaces</small>
<label className="form__label" htmlFor="save-dms">
Allow the importer to access your direct messages
<br />
<small className="form__subtitle">
You will be able to manually approve or discard messages before
they are publicly displayed.
</small>
</label>
</div>
)}
<div id="consent" className="form__section form__section--checkbox">
<input
@ -242,7 +327,6 @@ export function ImporterPage() {
defaultChecked={true}
id="save-session-key"
name="save_session_key"
value="1"
/>
<label className="form__label" htmlFor="save-session-key">
Allow administrator to use my session for debugging
@ -264,7 +348,6 @@ export function ImporterPage() {
defaultChecked={true}
id="auto_import"
name="auto_import"
value="1"
/>
<label className="form__label" htmlFor="auto_import">
Allow the importer to save my session key for auto-import
@ -277,47 +360,17 @@ export function ImporterPage() {
</label>
</div>
<div
id="dm-consent"
className={clsx(
"form__section",
"form__section--checkbox",
!dmLookup.includes(selectedService) && "form__section--hidden"
)}
>
<input
className="form__input"
type="checkbox"
id="save-dms"
name="save_dms"
value="1"
defaultChecked={dmLookup.includes(selectedService)}
/>
<label className="form__label" htmlFor="save-dms">
Allow the importer to access your direct messages
<br />
<small className="form__subtitle">
You will be able to manually approve or discard messages before
they are publicly displayed.
</small>
</label>
</div>
{selectedService == "fanbox" && (
<div
id="fanbox-test-consent"
className={clsx(
"form__section",
"form__section--checkbox",
selectedService !== "fanbox" && "form__section--hidden"
)}
className={"form__section form__section--checkbox"}
>
<input
className="form__input"
type="checkbox"
id="fanbox-test-consent"
name="fanbox-test-consent"
defaultChecked={false}
value="1"
required
/>
<label className="form__label" htmlFor="fanbox-test-consent">
I agree that this importer is in its testing phase, and that there
@ -328,7 +381,14 @@ export function ImporterPage() {
</small>
</label>
</div>
</FormRouter>
)}
<div className={styles.error} hidden={!error}>
Error: {error}
</div>
<button className="button form__button form__button--submit">Submit Key</button>
</form>
<h2 className="site-section__subheading">Important information</h2>
<p>
@ -376,95 +436,3 @@ export function ImporterPage() {
</PageSkeleton>
);
}
export async function action({ request }: ActionFunctionArgs) {
try {
if (request.method !== "POST") {
throw new Error(`Unknown method "${request.method}".`);
}
const data = await request.formData();
const service = (data.get("service") as string | null)?.trim();
{
if (!service) {
throw new Error("Service name is required.");
}
if (!PAYSITE_LIST.includes(service)) {
throw new Error(`Unknown service "${service}".`);
}
}
let isDMConsentChecked: boolean | undefined = undefined;
{
const inputValue = (data.get("save_dms") as string | null)?.trim();
if (inputValue === "1") {
isDMConsentChecked = true;
}
}
if (
service === "patreon" &&
isDMConsentChecked &&
!(await isRegisteredAccount())
) {
throw new Error("You must be registered to import DMs.");
}
let isFanboxConsentChecked: boolean | undefined = undefined;
{
const inputValue = (
data.get("fanbox-test-consent") as string | null
)?.trim();
if (inputValue === "1") {
isFanboxConsentChecked = true;
}
}
if (service === "fanbox" && !isFanboxConsentChecked) {
throw new Error("You need to agree to fanbox test imports.");
}
const sessionKey = (data.get("session_key") as string | null)?.trim();
{
if (!sessionKey) {
throw new Error("Session key is required.");
}
}
const saveSessionKey = (
data.get("save_session_key") as string | null
)?.trim();
const autoImport = (data.get("auto_import") as string | null)?.trim();
const userAgent = (data.get("user_agent") as string | null)?.trim();
const onlyfansXBC = (data.get("x-bc") as string | null)?.trim();
const onlyfansAuthID = (data.get("auth_id") as string | null)?.trim();
const discordChannelIDs = (
data.get("channel_ids") as string | null
)?.trim();
const { import_id } = await fetchCreateImport({
service,
session_key: sessionKey,
save_session_key: saveSessionKey,
auto_import: autoImport,
save_dms: isDMConsentChecked,
user_agent: userAgent,
"x-bc": onlyfansXBC,
auth_id: onlyfansAuthID,
channel_ids: discordChannelIDs,
});
return redirect(String(createImporterStatusPageURL(import_id)));
} catch (error) {
return error;
}
}

View File

@ -0,0 +1,21 @@
.searchForm {
text-align: center;
padding-top: 1em;
padding-bottom: 0.5em;
width: fit-content;
margin-left: auto;
margin-right: auto;
span {
user-select: none;
}
.error {
color: red;
}
button {
width: 100%;
margin-top: 1em;
}
}

View File

@ -1,37 +1,76 @@
import {
ActionFunctionArgs,
LoaderFunctionArgs,
redirect,
useLoaderData,
} from "react-router";
// TODO: https://github.com/Daninet/hash-wasm probably
// since this one wasn't updated in 3 years
import sha256 from "sha256-wasm";
import {
createDiscordChannelPageURL,
createFileSearchPageURL,
} from "#lib/urls";
import { fetchSearchFileByHash } from "#api/files";
import { PageSkeleton } from "#components/pages";
import { CardList, PostCard } from "#components/cards";
import { KemonoLink } from "#components/links";
import { FileSearchForm } from "#entities/files";
interface IProps {
hash?: string;
result?: Awaited<ReturnType<typeof fetchSearchFileByHash>>;
}
import * as styles from "./search_hash.module.scss";
import { MouseEvent, useState } from "react";
import { LoadingIcon } from "#components/loading";
export function SearchFilesPage() {
const { hash, result } = useLoaderData() as IProps;
const title = "Search files";
const heading = "Search Files";
const [loading, setLoading] = useState(false);
const [error, setError] = useState("");
const [files, setFiles] = useState<FileList | null>(null);
const [hash, setHash] = useState("");
const [result, setResult] = useState<Awaited<ReturnType<typeof fetchSearchFileByHash>> | null>(null);
async function submitClicked(event: MouseEvent) {
event.preventDefault();
setLoading(true);
setError("");
try {
if (files?.length) {
let fileHash = await getFileHash(files[0]);
setResult(await fetchSearchFileByHash(fileHash));
} else {
if (hash.match(/^[a-f0-9]{64}$/)) {
setResult(await fetchSearchFileByHash(hash));
} else {
setError("Invalid hash!");
}
}
} finally {
setLoading(false);
}
}
return (
<PageSkeleton name="file-hash-search" title={title} heading={heading}>
<FileSearchForm id="file-search" hash={hash} />
<form className={styles.searchForm}>
<div className="form__section">
<label htmlFor="file">File:</label>
<input id="file" type="file" name="file" onChange={e => setFiles(e.target.files)} />
</div>
{!result?.posts.length ? (
<div className="form__section" style={{marginTop: "-1em"}}>
<span>
<br />
or
<br />
</span>
</div>
<div className="form__section">
<label htmlFor="hash">SHA256 hash:</label>
<input id="hash" type="text" name="hash" onChange={e => setHash(e.target.value.toLowerCase())} value={hash} />
</div>
<div className={styles.error} style={{display: error ? "block" : "none"}}>
{error}
</div>
<button className="button" onClick={submitClicked} disabled={!(files?.length || hash) || loading}>Submit</button>
</form>
{loading ? <LoadingIcon /> : !result?.posts?.length ? (
<div className="no-results">
<h2 className="site-section__subheading">
Nobody here but us chickens!
@ -70,47 +109,6 @@ export function SearchFilesPage() {
);
}
export async function loader({ request }: LoaderFunctionArgs): Promise<IProps> {
const searchParams = new URL(request.url).searchParams;
const hash = searchParams.get("hash")?.trim();
if (!hash) {
return {};
}
const result = await fetchSearchFileByHash(hash);
return { hash, result };
}
export async function action({ request }: ActionFunctionArgs) {
if (request.method !== "POST") {
throw new Error("Unknown method");
}
const data = await request.formData();
const file = data.get("file") as File | null;
let hash = data.get("hash") as string | null;
if (!file && !hash) {
throw new Error("Neither file nor hash is provided");
}
if (file) {
hash = await getFileHash(file);
}
if (hash && !hash.match(/[A-Fa-f0-9]{64}/)) {
throw new Error("Hash is not a valid SHA256 value.");
}
await fetchSearchFileByHash(hash as string);
return redirect(String(createFileSearchPageURL(hash as string)));
}
async function getFileHash(file: File) {
const fileSize = file.size;
const chunkSize = 1024 * 1024; // 1Mi

View File

@ -3,15 +3,8 @@ import { ErrorPage } from "#components/pages";
import { HomePage } from "#pages/home";
import { ImporterTutorialPage } from "#pages/importer/importer_tutorial";
import { ImporterTutorialFanboxPage } from "#pages/importer/importer_tutorial_fanbox";
import {
ImporterPage,
action as importerPageAction,
} from "#pages/importer/importer_list";
import {
SearchFilesPage,
loader as searchFilesPageLoader,
action as searchFilesPageAction,
} from "#pages/search_hash";
import { ImporterPage } from "#pages/importer/importer_list";
import { SearchFilesPage } from "#pages/search_hash";
import { ImporterOKPage } from "#pages/importer/importer_ok";
import {
AdministratorDashboardPage,
@ -153,6 +146,7 @@ import {
LegacyFilePage,
loader as legacyFilePageLoader,
} from "#pages/file/legacy";
import { Error404 } from "#pages/errors/404";
export const routes = [
{
@ -191,7 +185,6 @@ export const routes = [
{
path: "/importer",
element: <ImporterPage />,
action: importerPageAction,
},
{
path: "/importer/ok",
@ -213,8 +206,6 @@ export const routes = [
{
path: "/search_hash",
element: <SearchFilesPage />,
loader: searchFilesPageLoader,
action: searchFilesPageAction,
},
{
path: "/artists",
@ -438,6 +429,10 @@ export const routes = [
loader: administratorAccountOverviewPageLoader,
action: administratorAccountOverviewPageAction,
},
{
path: "/*",
element: <Error404 />,
}
],
},
],

View File

@ -10,19 +10,47 @@ const config = defineConfig(async (configEnv) => {
/**
* @type {import("vite").UserConfig}
*/
const targetBackend = `${apiServerBaseURL}${apiServerPort ? `:${apiServerPort}` : ''}`;
const proxyConfig = {};
if (targetBackend.includes("kemono.su")) {
proxyConfig["/api/v1/creators"] = {
target: "https://kemono.su", // target the base URL for the proxy
changeOrigin: true,
secure: true,
rewrite: (path) => path.replace(/^\/api\/v1\/creators/, '/api/v1/creators.txt'),
};
}
proxyConfig["/api"] = {
target: targetBackend,
changeOrigin: true,
secure: true,
};
proxyConfig["/icons"] = {
target: targetBackend,
changeOrigin: true,
secure: true,
};
proxyConfig["/thumbnail"] = {
target: targetBackend,
changeOrigin: true,
secure: true,
};
proxyConfig["/data"] = {
target: targetBackend,
changeOrigin: true,
secure: true,
};
const devConfig = {
server: {
host: "0.0.0.0",
port: 3450,
strictPort: true,
proxy: {
"/api": `${apiServerBaseURL}:${apiServerPort}`
},
allowedHosts: [
"web",
],
proxy: proxyConfig,
allowedHosts: ["all"],
},
};
const resolvedBase = await baseConfig(configEnv);
const finalConfig = mergeConfig(resolvedBase, devConfig);

View File

@ -20,8 +20,8 @@ const devServer = {
port: 3450,
proxy: [
{
context: ["/api"],
target: `${apiServerBaseURL}:${apiServerPort}`,
context: ["/api","/icons","/data", "/thumbnail"],
target: `http://localhost:3449`,
},
],
static: {

View File

@ -0,0 +1,31 @@
{
"site": "http://localhost:3450",
"development_mode": true,
"automatic_migrations": true,
"webserver": {
"secret": "To SECRET name.",
"base_url": "https://kemono.su",
"ui": {
"home": {
"site_name": "Kemono"
},
"config": {
"paysite_list": ["patreon", "fanbox", "afdian"],
"artists_or_creators": "Artists"
}
}
},
"database": {
"host": "postgres",
"user": "kemono",
"password": "kemono",
"database": "kemono"
},
"redis": {
"defaults": {
"host": "redis",
"port": 6379,
"db": 0
}
}
}

View File

@ -1,7 +1,6 @@
version: '3'
services:
postgres:
image: groonga/pgroonga:latest-debian-16
image: groonga/pgroonga:3.1.6-alpine-16-slim
restart: unless-stopped
environment:
- POSTGRES_DB=kemono
@ -19,11 +18,12 @@ services:
ports:
- '16379:6379'
web:
api:
build:
context: .
args:
GIT_COMMIT_HASH: "custom"
BUILD_DATE: "1970.01.01"
restart: unless-stopped
depends_on:
- postgres
@ -52,6 +52,21 @@ services:
command:
[ "python", "-m", "src", "run" ]
web:
build:
context: .
dockerfile: Dockerfile-client
args:
GIT_COMMIT_HASH: "custom"
BUILD_DATE: "1970.01.01"
restart: unless-stopped
depends_on:
- api
volumes:
- ./:/app
command:
[ "npm", "run", "dev" ]
nginx:
image: nginx
depends_on:
@ -63,16 +78,3 @@ services:
- ./storage/files:/storage
- ./:/app
webpack:
build:
context: .
restart: unless-stopped
environment:
- FLASK_ENV=development
- KEMONO_SITE=http://localhost:5000
- KEMONO_CONFIG=config.example.json
- PYTHONPATH=/app
volumes:
- .:/app
command:
[ "python", "-m", "src", "webpack" ]

View File

@ -44,3 +44,94 @@ Through a PPA (Personal Package Archives).
```sh
which python3.12
```
## How Do I Install Node.js 22.14?
### For Linux/macOS:
1. Install NVM:
```sh
# Using curl
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
# OR using wget
wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
```
2. Add NVM to your shell configuration:
The installer should automatically add the necessary configuration to your shell profile
(`.bashrc`, `.zshrc`, etc.), but if it doesn't, add these lines manually:
```sh
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion
```
3. Either restart your terminal or source your profile:
```sh
# For bash
source ~/.bashrc
# For zsh
source ~/.zshrc
```
4. Verify NVM installation:
```sh
nvm --version
```
5. Install Node.js 22.14:
```sh
nvm install 22.14
```
6. Set it as the default (optional):
```sh
nvm alias default 22.14
```
7. Verify the installation:
```sh
node --version # Should output v22.14.0
npm --version # npm (10.9.2 or newer) is bundled with Node.js
```
8. If you need to switch between Node.js versions:
```sh
nvm ls
nvm use 22.14
nvm use default
```
### For Windows:
Windows users can install Node.js directly using the official installer:
1. Go to the Node.js downloads page:
https://nodejs.org/en/download/current
2. Download the Windows installer for v22.14.0:
- Look for `node-v22.14.X-x64.msi` (64-bit installer)
- The "X" represents the patch version which may change
3. Run the downloaded MSI file and follow the installation wizard.
4. Verify the installation by opening Command Prompt or PowerShell and running:
```
node --version
npm --version
```
5. The installer includes npm, so no separate installation is needed. npm version should be 10.9.2 or newer.

42
docs/develop-docker.md Normal file
View File

@ -0,0 +1,42 @@
# Docker Universal Setup
The quickest way to get started with development is using Docker. This approach works on all operating systems and requires minimal setup.
## Prerequisites
- [Docker](https://docs.docker.com/get-docker/)
- [Docker Compose](https://docs.docker.com/compose/install/)
## Setup Steps
1. Clone the repository (if you haven't already)
2. Copy the example configuration file:
```sh
cp config.json.example config.json
```
3. Start the containers using Docker Compose:
```sh
docker compose up
```
4. Once the containers are running, access the application at:
[http://localhost:5000](http://localhost:5000)
## Stop the Environment
To stop the containers, press `Ctrl+C` in the terminal where Docker Compose is running, or run:
```sh
docker compose down
```
## Notes
- No local dependencies are required with this method
- This setup is ideal for testing or quick development
- All services (backend, frontend, database) run in containers

147
docs/develop-linux.md Normal file
View File

@ -0,0 +1,147 @@
# Develop
For now, Docker is the primary way of working on the repo.
However, dependencies still need to be installed locally for
the IDE setup.
## Requirements
- Python: 3.12+
- NodeJS: 22.14+
- PostgreSQL: 16+
- Redis: 6+
## Installation
1. Check if Node.js 22.14 or newer is installed on the system:
```sh
node --version
```
If it fails, follow the [installation instructions](./FAQ.md#how-do-i-install-nodejs-2214)
2. Check if Python 3.12 is installed on the system:
```sh
which python3.12
```
If no path is returned, follow the [installation instructions](./FAQ.md#how-do-i-install-python-312-on-ubuntu-22)
3. Install `virtualenv` package if it's not installed.
```sh
pip install --user virtualenv
```
4. Create a virtual environment:
```sh
virtualenv --python=python3.12 venv
```
5. Activate the virtual environment.
```sh
# Windows ➞ venv\Scripts\activate
source venv/bin/activate
```
6. Install python packages.
```sh
pip install --requirement requirements.txt
```
7. Install `pre-commit` hooks.
```sh
pre-commit install --install-hooks
```
## Database Setup
1. Install PostgreSQL if not already installed.
2. Create a database and user for the application.
3. Add the database credentials to `config.json`, as sketched below.
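A minimal sketch of the `database` section of `config.json`, mirroring the example config shipped with the repo; `localhost` and the credentials are assumptions for a locally running PostgreSQL, so adjust them to your setup:
```json
"database": {
  "host": "localhost",
  "user": "kemono",
  "password": "kemono",
  "database": "kemono"
}
```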
## Redis Setup
1. Install Redis if not already installed.
2. Verify Redis is running.
3. Add the Redis connection details to `config.json`, as sketched below.
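A minimal sketch of the `redis` section, again mirroring the example config; `localhost` is an assumption for a locally running Redis (the Docker setup uses the `redis` service name instead):
```json
"redis": {
  "defaults": {
    "host": "localhost",
    "port": 6379,
    "db": 0
  }
}
```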
## File Paths Configuration
The application requires several directories for storing and serving files:
1. Make sure you have the directories that hold the files: `/data`, `/thumbnails`, `/icons`.
2. Serve them with nginx or another file server of your choice.
3. Configure the base URLs to point at that file server by setting the following environment variables (an example export is sketched after this list):
```env
ICONS_PREPEND
BANNERS_PREPEND
THUMBNAILS_PREPEND
```
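A hedged example of exporting these for a local file server; the `http://localhost:8080` base URL is an assumption, substitute whatever your file server from step 2 actually serves:
```sh
# assumed local file server base URL; adjust to your setup
export ICONS_PREPEND=http://localhost:8080
export BANNERS_PREPEND=http://localhost:8080
export THUMBNAILS_PREPEND=http://localhost:8080
```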
## Run
1. Run the API dev server:
```sh
python -m src web
```
2. Run frontend dev server:
```sh
python -m src webpack
```
## Git
Configure `git` to store credentials:
```sh
git config credential.helper store
```
The next time your credentials are accepted, they are saved to disk
per the rules in `man git-credential-store` and won't be asked for again.
Alternatively they can be stored temporarily in memory:
```sh
git config credential.helper cache
```
The creds are stored as per rules in `man git-credential-cache`.
## IDE
_IDE specific instructions._
### VSCode
1. Copy `.code-workspace` file.
```sh
cp \
configs/workspace.code-workspace.example \
kemono-2.code-workspace
```
2. Install the recommended extensions.
[`http://localhost:5000/development`]: http://localhost:5000/development
[`http://localhost:5000/`]: http://localhost:5000/
[`http://localhost:8000/`]: http://localhost:8000/

View File

@ -0,0 +1,32 @@
# Windows Client Only Setup with Reverse Proxy
This guide covers setting up a frontend-only development environment on Windows with a reverse proxy to connect to a remote backend.
## Requirements
- NodeJS: 22.14+
- Git
- Docker
## Installation
1. Check if Node.js 22.14+ is installed:
```
node --version
```
If not installed or version is older, follow the [Node.js installation instructions](./FAQ.md#how-do-i-install-nodejs-2214)
2. Clone the repository (if you haven't already)
3. Navigate to the client directory and run the development server:
```
cp config.example.production.json config.json
cd client
npm run dev
```
4. Access the application at:
[http://localhost:5000/](http://localhost:5000/)
5. Make sure images and API calls are being reverse proxied; a quick check is sketched below.
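One quick spot-check, assuming the dev server from step 3 listens on port 5000 as above (adjust the port if yours differs); `/api/v1/creators` is one of the routes the dev proxy forwards, alongside `/icons`, `/thumbnail`, and `/data`:
```sh
# should be answered by the remote backend, not the dev server's 404 page
curl -I http://localhost:5000/api/v1/creators
```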

View File

@ -1,112 +1,28 @@
# Develop
# Development Guide
For now Docker is a primary way of working on the repo.
However dependencies are still needed to installed locally for
the IDE setup.
This guide provides several ways to set up your development environment based on your needs and operating system.
## Requirements:
## Setup Options
Python: 3.12+
NodeJS: 18.20+
### [Docker Universal](./develop-docker.md)
The quickest way to get started with minimal setup:
- Works on all operating systems
- Uses Docker Compose
- No need to install dependencies locally
## Installation
1. Check if python 3.12 is installed in the system:
### [Windows Client Only](./develop-windows-client.md)
Frontend development on Windows with reverse proxy:
- Only frontend dependencies required
- Uses reverse proxy to connect to remote backend
- Lightweight setup
```sh
which python 3.12
```
### [Linux Setup](./develop-linux.md)
Complete development environment for Linux:
- Local dependency installation
- Full development stack
- IDE configuration
If no path returned, follow [installation instructions](./FAQ.md#how-do-i-install-python-312-on-ubuntu-22)
## Getting Help
2. Install `virtualenv` package if it's not installed.
```sh
pip install --user virtualenv
```
3. Create a virtual environment:
```sh
virtualenv python=3.12 venv
```
4. Activate the virtual environment.
```sh
# Windows ➞ venv\Scripts\activate
source venv/bin/activate
```
5. Install python packages.
```sh
pip install --requirement requirements.txt
```
6. Install`pre-commit`hooks.
```sh
pre-commit install --install-hooks
```
### Build
```sh
docker-compose build
docker-compose up --detach
```
In a browser, visit[`http://localhost:8000/`]
## Manual
1. Run the API dev server:
```sh
pytnon -m src web
```
2. Run frontend dev server:
```sh
python -m src webpack
```
## Git
Configure `git` to store credentials:
```sh
git config credential.helper store
```
After the next time creds are accepted, they will be saved on hard drive
as per rules listed in `man git-credential-store`and won't be asked again.
Alternatively they can be stored temporarily in memory:
```sh
git config credential.helper cache
```
The creds are stored as per rules in `man git-credential-cache`.
## IDE
_IDE specific instructions._
### VSCode
1. Copy`.code-workspace`file.
```sh
cp \
configs/workspace.code-workspace.example \
kemono-2.code-workspace
```
2. Install the recommended extensions.
[`http://localhost:5000/development`]: http://localhost:5000/development
[`http://localhost:5000/`]: http://localhost:5000/
[`http://localhost:8000/`]: http://localhost:8000/
For additional assistance, check the [FAQ](./FAQ.md).

View File

@ -60,7 +60,7 @@
"description": "Configuration for the frontend server.",
"type": "object",
"additionalProperties": false,
"required": ["secret", "port", "ui"],
"required": ["secret", "ui"],
"properties": {
"secret": {
"description": "Secret key used to encrypt sessions.",

View File

@ -9,7 +9,7 @@ export interface IConfiguration {
interface IServerConfig {
ui: IUIConfig;
port: number;
port?: number;
base_url?: string;
}

View File

@ -66,7 +66,7 @@ class TDFileRelationships(TypedDict):
discord_posts: list[TDDiscordPost]
def get_file_relationships(file_hash: str, reload: bool = False) -> TDFileRelationships | None:
def get_file_relationships(file_hash: str, reload: bool = False) -> TDFileRelationships:
key = f"files:by_hash:{file_hash}"
query = """
SELECT

View File

@ -1,3 +1,4 @@
import re
from flask import jsonify, make_response, request
from src.config import Configuration
@ -8,19 +9,17 @@ from src.lib.api import create_not_found_error_response, create_client_error_res
from src.pages.api.v1 import v1api_bp
HASH_REGEX = re.compile(r"[a-f0-9]{64}")
@v1api_bp.get("/search_hash/<file_hash>")
def lookup_file(file_hash):
if not (len(file_hash) == 64 and all(c in "0123456789abcdefABCDEF" for c in file_hash)):
def lookup_file(file_hash: str):
file_hash = file_hash.lower()
if not HASH_REGEX.fullmatch(file_hash):
return create_client_error_response("Invalid SHA256 hash")
file = get_file_relationships(file_hash)
if not (file):
response = create_not_found_error_response()
response.headers["Cache-Control"] = "s-maxage=600"
return response
response = make_response(jsonify(file), 200)
response.headers["Cache-Control"] = "s-maxage=600"