mirror of https://github.com/NebulaServices/Nebula.git (synced 2025-05-12 11:30:01 -04:00)
Chore: format
This commit is contained in: parent 7d325d424a, commit c18f175b08
8 changed files with 236 additions and 195 deletions

@@ -4,9 +4,9 @@ import tailwind from "@astrojs/tailwind";
import { baremuxPath } from "@mercuryworkshop/bare-mux";
import { epoxyPath } from "@mercuryworkshop/epoxy-transport";
import { libcurlPath } from "@mercuryworkshop/libcurl-transport";
import playformCompress from "@playform/compress";
import { uvPath } from "@titaniumnetwork-dev/ultraviolet";
import icon from "astro-icon";
import playformCompress from "@playform/compress";
import { defineConfig, envField } from "astro/config";
import { viteStaticCopy } from "vite-plugin-static-copy";
import { version } from "./package.json";

@@ -14,12 +14,17 @@ export default defineConfig({
experimental: {
env: {
schema: {
VERSION: envField.string({ context: 'client', access: 'public', optional: true, default: version })
VERSION: envField.string({
context: "client",
access: "public",
optional: true,
default: version
})
}
}
},
integrations: [
tailwind(),
tailwind(),
icon(),
svelte(),
playformCompress({

@@ -82,7 +87,7 @@ export default defineConfig({
"/styles": {
target: "http://localhost:8080",
changeOrigin: true
},
}
}
}
},

@@ -1,44 +1,44 @@
import { readFileSync } from 'node:fs';
import { parse, TomlPrimitive } from 'smol-toml';
import { fileURLToPath } from 'node:url';
import chalk from 'chalk';
import { readFileSync } from "node:fs";
import { fileURLToPath } from "node:url";
import chalk from "chalk";
import { TomlPrimitive, parse } from "smol-toml";

interface TomlData {
marketplace: {
enabled: boolean;
psk: String
}
psk: String;
};
server: {
server: {
port: number;
wisp: boolean;
logging: boolean;
}
};
rammerhead: {
reverseproxy: boolean;
localstorage_sync: boolean;
http2: boolean;
}
},
};
};
db: {
name: string;
username: string;
password: string;
postgres: boolean;
},
};
postgres: {
domain: string;
port: number;
}
};
}

interface Verify {
name: string,
typeOF: any,
type: any
name: string;
typeOF: any;
type: any;
}

let doc = readFileSync(fileURLToPath(new URL('../config.toml', import.meta.url))).toString();
let doc = readFileSync(fileURLToPath(new URL("../config.toml", import.meta.url))).toString();
const parsedDoc = parse(doc) as unknown as TomlData;

function verify(t: Verify[]) {

@@ -50,33 +50,41 @@ function verify(t: Verify[]) {
}

verify([
{name: 'marketplace', typeOF: parsedDoc.marketplace, type: 'object'},
{name: 'marketplace.enabled', typeOF: parsedDoc.marketplace.enabled, type: 'boolean'},
{name: 'marketplace.psk', typeOF: parsedDoc.marketplace.psk, type: 'string'},
{name: 'server', typeOF: parsedDoc.server, type: 'object'},
{name: 'server.server', typeOF: parsedDoc.server.server, type: 'object'},
{name: 'server.rammerhead', typeOF: parsedDoc.server.rammerhead, type: 'object'},
{name: 'server.server.port', typeOF: parsedDoc.server.server.port, type: 'number'},
{name: 'server.server.wisp', typeOF: parsedDoc.server.server.wisp, type: 'boolean'},
{name: 'server.server.logging', typeOF: parsedDoc.server.server.logging, type: 'boolean'},
{name: 'server.rammerhead.reverseproxy', typeOF: parsedDoc.server.rammerhead.reverseproxy, type: 'boolean'},
{name: 'server.rammerhead.localstorage_sync', typeOF: parsedDoc.server.rammerhead.localstorage_sync, type: 'boolean'},
{name: 'server.rammerhead.http2', typeOF: parsedDoc.server.rammerhead.http2, type: 'boolean'},
{name: 'db', typeOF: parsedDoc.db, type: 'object'},
{name: 'db.name', typeOF: parsedDoc.db.name, type: 'string'},
{name: 'db.username', typeOF: parsedDoc.db.username, type: 'string'},
{name: 'db.password', typeOF: parsedDoc.db.password, type: 'string'},
{name: 'db.postgres', typeOF: parsedDoc.db.postgres, type: 'boolean'},
{name: 'postgres', typeOF: parsedDoc.postgres, type: 'object'},
{name: 'postgres.domain', typeOF: parsedDoc.postgres.domain, type: 'string'},
{name: 'postgres.port', typeOF: parsedDoc.postgres.port, type: 'number'}
{ name: "marketplace", typeOF: parsedDoc.marketplace, type: "object" },
{ name: "marketplace.enabled", typeOF: parsedDoc.marketplace.enabled, type: "boolean" },
{ name: "marketplace.psk", typeOF: parsedDoc.marketplace.psk, type: "string" },
{ name: "server", typeOF: parsedDoc.server, type: "object" },
{ name: "server.server", typeOF: parsedDoc.server.server, type: "object" },
{ name: "server.rammerhead", typeOF: parsedDoc.server.rammerhead, type: "object" },
{ name: "server.server.port", typeOF: parsedDoc.server.server.port, type: "number" },
{ name: "server.server.wisp", typeOF: parsedDoc.server.server.wisp, type: "boolean" },
{ name: "server.server.logging", typeOF: parsedDoc.server.server.logging, type: "boolean" },
{
name: "server.rammerhead.reverseproxy",
typeOF: parsedDoc.server.rammerhead.reverseproxy,
type: "boolean"
},
{
name: "server.rammerhead.localstorage_sync",
typeOF: parsedDoc.server.rammerhead.localstorage_sync,
type: "boolean"
},
{ name: "server.rammerhead.http2", typeOF: parsedDoc.server.rammerhead.http2, type: "boolean" },
{ name: "db", typeOF: parsedDoc.db, type: "object" },
{ name: "db.name", typeOF: parsedDoc.db.name, type: "string" },
{ name: "db.username", typeOF: parsedDoc.db.username, type: "string" },
{ name: "db.password", typeOF: parsedDoc.db.password, type: "string" },
{ name: "db.postgres", typeOF: parsedDoc.db.postgres, type: "boolean" },
{ name: "postgres", typeOF: parsedDoc.postgres, type: "object" },
{ name: "postgres.domain", typeOF: parsedDoc.postgres.domain, type: "string" },
{ name: "postgres.port", typeOF: parsedDoc.postgres.port, type: "number" }
]);

if (parsedDoc.marketplace.psk === "CHANGEME") {
console.warn(chalk.yellow.bold('PSK should be changed from "CHANGEME"'));
}
if (parsedDoc.db.password === "password") {
console.warn(chalk.red.bold('You should change your DB password!!'));
console.warn(chalk.red.bold("You should change your DB password!!"));
}

export { TomlData, parsedDoc }
export { TomlData, parsedDoc };

@@ -1,12 +1,12 @@
import chalk from "chalk";
import { CatalogModel, Catalog } from "./server.js";
import { ModelStatic } from "sequelize";
import { fileURLToPath } from "node:url";
import ora from 'ora';
import chalk from "chalk";
import ora from "ora";
import { ModelStatic } from "sequelize";
import { Catalog, CatalogModel } from "./server.js";

interface Items extends Omit<Catalog, "background_video" | "background_image"> {
background_video?: string
background_image?: string
background_video?: string;
background_image?: string;
}

async function installItems(db: ModelStatic<CatalogModel>, items: Items[]) {

@@ -30,35 +30,35 @@ async function installItems(db: ModelStatic<CatalogModel>, items: Items[]) {
async function setupDB(db: ModelStatic<CatalogModel>) {
//We have some packages that need to be installed if they aren't.
const items: Items[] = [
{
package_name: 'com.nebula.gruvbox',
title: 'Gruvbox',
image: 'gruvbox.jpeg',
author: 'Nebula Services',
version: '1.0.0',
description: 'The gruvbox theme',
{
package_name: "com.nebula.gruvbox",
title: "Gruvbox",
image: "gruvbox.jpeg",
author: "Nebula Services",
version: "1.0.0",
description: "The gruvbox theme",
tags: ["Theme", "Simple"],
payload: "gruvbox.css",
type: 'theme'
type: "theme"
},
{
package_name: 'com.nebula.oled',
title: 'Oled theme',
image: 'oled.jpg',
author: 'Nebula Services',
version: '1.0.0',
description: 'A sleek & simple Oled theme for Nebula',
tags: ['Theme', 'Simple', 'Sleek'],
payload: 'oled.css',
type: 'theme'
package_name: "com.nebula.oled",
title: "Oled theme",
image: "oled.jpg",
author: "Nebula Services",
version: "1.0.0",
description: "A sleek & simple Oled theme for Nebula",
tags: ["Theme", "Simple", "Sleek"],
payload: "oled.css",
type: "theme"
}
]
];
const dbItems = await db.findAll();
if (dbItems.length === 0) {
const spinner = ora(chalk.hex('#7967dd')('Performing DB setup...')).start();
const spinner = ora(chalk.hex("#7967dd")("Performing DB setup...")).start();
await installItems(db, items);
spinner.succeed(chalk.hex('#eb6f92')('DB setup complete!'));
spinner.succeed(chalk.hex("#eb6f92")("DB setup complete!"));
}
}

export { setupDB }
export { setupDB };

2 server/env.d.ts (vendored)
@@ -1 +1 @@
declare module '@rubynetwork/rammerhead/src/server/index.js';
declare module "@rubynetwork/rammerhead/src/server/index.js";

237 server/server.ts
@@ -1,47 +1,53 @@
import { createWriteStream } from "node:fs";
import { constants, access, mkdir } from "node:fs/promises";
import { pipeline } from "node:stream/promises";
import { fileURLToPath } from "node:url";
import fastifyCompress from "@fastify/compress";
import fastifyMiddie from "@fastify/middie";
import fastifyStatic from "@fastify/static";
import fastifyMultipart from "@fastify/multipart";
import Fastify, { FastifyReply, FastifyRequest } from 'fastify';
import chalk from 'chalk';
import { serverFactory } from "./serverFactory.js";
import { handler as ssrHandler } from "../dist/server/entry.mjs";
import fastifyStatic from "@fastify/static";
import chalk from "chalk";
import Fastify, { FastifyReply, FastifyRequest } from "fastify";
import gradient from "gradient-string";
import { parsedDoc } from "./config.js";
import { DataTypes, InferAttributes, InferCreationAttributes, Model, Sequelize } from "sequelize";
import { pipeline } from "node:stream/promises";
import { createWriteStream } from "node:fs";
import { handler as ssrHandler } from "../dist/server/entry.mjs";
import { parsedDoc } from "./config.js";
import { setupDB } from "./dbSetup.js";
import { access, constants, mkdir } from "node:fs/promises";
import { serverFactory } from "./serverFactory.js";


const app = Fastify({ logger: parsedDoc.server.server.logging, ignoreDuplicateSlashes: true, ignoreTrailingSlash: true, serverFactory: serverFactory });
const app = Fastify({
logger: parsedDoc.server.server.logging,
ignoreDuplicateSlashes: true,
ignoreTrailingSlash: true,
serverFactory: serverFactory
});
const db = new Sequelize(parsedDoc.db.name, parsedDoc.db.username, parsedDoc.db.password, {
host: parsedDoc.db.postgres ? `${parsedDoc.postgres.domain}` : 'localhost',
host: parsedDoc.db.postgres ? `${parsedDoc.postgres.domain}` : "localhost",
port: parsedDoc.db.postgres ? parsedDoc.postgres.port : undefined,
dialect: parsedDoc.db.postgres ? 'postgres': 'sqlite',
dialect: parsedDoc.db.postgres ? "postgres" : "sqlite",
logging: parsedDoc.server.server.logging,
storage: 'database.sqlite' //this is sqlite only
storage: "database.sqlite" //this is sqlite only
});

type CatalogType = "theme" | "plugin"
type CatalogType = "theme" | "plugin";

interface Catalog {
package_name: string
title: string
description: string
author: string
image: string
tags: object
version: string
background_image: string
background_video: string
payload: string
type: CatalogType
package_name: string;
title: string;
description: string;
author: string;
image: string;
tags: object;
version: string;
background_image: string;
background_video: string;
payload: string;
type: CatalogType;
}

interface CatalogModel extends Catalog, Model<InferAttributes<CatalogModel>, InferCreationAttributes<CatalogModel>> {};
interface CatalogModel
extends Catalog,
Model<InferAttributes<CatalogModel>, InferCreationAttributes<CatalogModel>> {}

const catalogAssets = db.define<CatalogModel>("catalog_assets", {
package_name: { type: DataTypes.STRING, unique: true },

@@ -58,32 +64,32 @@ const catalogAssets = db.define<CatalogModel>("catalog_assets", {
});

await app.register(fastifyCompress, {
encodings: ['br', 'gzip', 'deflate']
encodings: ["br", "gzip", "deflate"]
});

await app.register(fastifyMultipart);

await app.register(fastifyStatic, {
root: fileURLToPath(new URL('../dist/client', import.meta.url)),
decorateReply: false,
root: fileURLToPath(new URL("../dist/client", import.meta.url)),
decorateReply: false
});

await app.register(fastifyStatic, {
root: fileURLToPath(new URL('../database_assets', import.meta.url)),
prefix: '/packages/',
root: fileURLToPath(new URL("../database_assets", import.meta.url)),
prefix: "/packages/",
decorateReply: false
});

await app.register(fastifyMiddie);

app.get("/api", (request, reply) => {
reply.send({ Server: 'Active' });
reply.send({ Server: "Active" });
});

// This API returns a list of the assets in the database (SW plugins and themes).
// It also returns the number of pages in the database.
// It can take a `?page=x` argument to display a different page, with a limit of 20 assets per page.
type CatalogAssetsReq = FastifyRequest<{Querystring: { page: string } }>
type CatalogAssetsReq = FastifyRequest<{ Querystring: { page: string } }>;
app.get("/api/catalog-assets/", async (request: CatalogAssetsReq, reply) => {
try {
const { page } = request.query;

@@ -110,70 +116,73 @@ app.get("/api/catalog-assets/", async (request: CatalogAssetsReq, reply) => {
return acc;
}, {});
reply.send({ assets, pages: Math.ceil(totalItems / 20) });
}
catch (error) {
reply.status(500).send({ error: 'An error occured' });
} catch (error) {
reply.status(500).send({ error: "An error occured" });
}
});


type PackageReq = FastifyRequest<{Params: { package: string } }>
type PackageReq = FastifyRequest<{ Params: { package: string } }>;
app.get("/api/packages/:package", async (request: PackageReq, reply) => {
try {
const packageRow = await catalogAssets.findOne({ where: { package_name: request.params.package }});
if (!packageRow) return reply.status(404).send({ error: 'Package not found!' });
const packageRow = await catalogAssets.findOne({
where: { package_name: request.params.package }
});
if (!packageRow) return reply.status(404).send({ error: "Package not found!" });
const details = {
title: packageRow.get("title"),
description: packageRow.get('description'),
image: packageRow.get('image'),
author: packageRow.get('author'),
tags: packageRow.get('tags'),
version: packageRow.get('version'),
background_image: packageRow.get('background_image'),
background_video: packageRow.get('background_video'),
payload: packageRow.get('payload'),
type: packageRow.get('type')
description: packageRow.get("description"),
image: packageRow.get("image"),
author: packageRow.get("author"),
tags: packageRow.get("tags"),
version: packageRow.get("version"),
background_image: packageRow.get("background_image"),
background_video: packageRow.get("background_video"),
payload: packageRow.get("payload"),
type: packageRow.get("type")
};
reply.send(details);
}
catch (error) {
reply.status(500).send({ error: 'An unexpected error occured' });
} catch (error) {
reply.status(500).send({ error: "An unexpected error occured" });
}
});

type UploadReq = FastifyRequest<{Headers: { psk: string, packagename: string }}>;
type CreateReq = FastifyRequest<{Headers: { psk: string },
Body: {
uuid: string,
title: string,
image: string,
author: string,
version: string,
description: string,
tags: object | any,
payload: string,
background_video: string,
background_image: string,
type: CatalogType
}}>;
type UploadReq = FastifyRequest<{ Headers: { psk: string; packagename: string } }>;
type CreateReq = FastifyRequest<{
Headers: { psk: string };
Body: {
uuid: string;
title: string;
image: string;
author: string;
version: string;
description: string;
tags: object | any;
payload: string;
background_video: string;
background_image: string;
type: CatalogType;
};
}>;
interface VerifyStatus {
status: number;
error?: Error;
}
async function verifyReq(request: UploadReq | CreateReq, upload: Boolean, data: any): Promise<VerifyStatus> {
async function verifyReq(
request: UploadReq | CreateReq,
upload: Boolean,
data: any
): Promise<VerifyStatus> {
if (parsedDoc.marketplace.enabled === false) {
return {status: 500, error: new Error('Marketplace Is disabled!')};
return { status: 500, error: new Error("Marketplace Is disabled!") };
} else if (request.headers.psk !== parsedDoc.marketplace.psk) {
return { status: 403, error: new Error("PSK isn't correct!") };
} else if (upload && !request.headers.packagename) {
return { status: 500, error: new Error("No packagename defined!") };
} else if (upload && !data) {
return { status: 400, error: new Error("No file uploaded!") };
} else {
return { status: 200 };
}
else if (request.headers.psk !== parsedDoc.marketplace.psk) {
return {status: 403, error: new Error("PSK isn't correct!")};
}
else if(upload && !request.headers.packagename) {
return {status: 500, error: new Error('No packagename defined!')};
}
else if (upload && !data) {
return {status: 400, error: new Error('No file uploaded!')};
}
else { return {status: 200 }; }
}

app.post("/api/upload-asset", async (request: UploadReq, reply) => {

@@ -181,15 +190,25 @@ app.post("/api/upload-asset", async (request: UploadReq, reply) => {
const verify: VerifyStatus = await verifyReq(request, true, data);
if (verify.error !== undefined) {
reply.status(verify.status).send({ status: verify.error.message });
}
else {
try {
await pipeline(data.file, createWriteStream(fileURLToPath(new URL(`../database_assets/${request.headers.packagename}/${data.filename}`, import.meta.url))));
} else {
try {
await pipeline(
data.file,
createWriteStream(
fileURLToPath(
new URL(
`../database_assets/${request.headers.packagename}/${data.filename}`,
import.meta.url
)
)
)
);
} catch (error) {
return reply
.status(500)
.send({ status: `File couldn't be uploaded! (Package most likely doesn't exist)` });
}
catch (error) {
return reply.status(500).send({ status: `File couldn't be uploaded! (Package most likely doesn't exist)` });
}
return reply.status(verify.status).send({ status: 'File uploaded successfully!' });
return reply.status(verify.status).send({ status: "File uploaded successfully!" });
}
});

@@ -197,9 +216,8 @@ app.post("/api/create-package", async (request: CreateReq, reply) => {
const verify: VerifyStatus = await verifyReq(request, false, undefined);
if (verify.error !== undefined) {
reply.status(verify.status).send({ status: verify.error.message });
}
else {
const body: Catalog = {
} else {
const body: Catalog = {
package_name: request.body.uuid,
title: request.body.title,
image: request.body.image,

@@ -211,8 +229,8 @@ app.post("/api/create-package", async (request: CreateReq, reply) => {
background_video: request.body.background_video,
background_image: request.body.background_image,
type: request.body.type as CatalogType
}
await catalogAssets.create({
};
await catalogAssets.create({
package_name: body.package_name,
title: body.title,
image: body.image,

@@ -225,40 +243,47 @@ app.post("/api/create-package", async (request: CreateReq, reply) => {
background_image: body.background_image,
type: body.type
});
const assets = fileURLToPath(new URL('../database_assets', import.meta.url));
const assets = fileURLToPath(new URL("../database_assets", import.meta.url));
try {
await access(`${assets}/${body.package_name}/`, constants.F_OK);
return reply.status(500).send({ status: 'Package already exists!' });
}
catch (err) {
return reply.status(500).send({ status: "Package already exists!" });
} catch (err) {
await mkdir(`${assets}/${body.package_name}/`);
return reply.status(verify.status).send({ status: 'Package created successfully!' });
return reply.status(verify.status).send({ status: "Package created successfully!" });
}
}
});

app.use(ssrHandler);

const port: number = parseInt(process.env.PORT as string) || parsedDoc.server.server.port || parseInt('8080');
const port: number =
parseInt(process.env.PORT as string) || parsedDoc.server.server.port || parseInt("8080");
const titleText = `
 _ _ _ _ ____ _
| \\ | | ___| |__ _ _| | __ _ / ___| ___ _ ____ _(_) ___ ___ ___
| \\| |/ _ \\ '_ \\| | | | |/ _' | \\___ \\ / _ \\ '__\\ \\ / / |/ __/ _ \\/ __|
| |\\ | __/ |_) | |_| | | (_| | ___) | __/ | \\ V /| | (_| __/\\__ \\
|_| \\_|\\___|_.__/ \\__,_|_|\\__,_| |____/ \\___|_| \\_/ |_|\\___\\___||___/
`
`;
const titleColors = {
purple: "#7967dd",
pink: "#eb6f92"
};


console.log(gradient(Object.values(titleColors)).multiline(titleText as string));
app.listen({ port: port, host: '0.0.0.0' }).then(async () => {
console.log(chalk.hex('#7967dd')(`Server listening on ${chalk.hex('#eb6f92').bold('http://localhost:' + port + '/')}`));
console.log(chalk.hex('#7967dd')(`Server also listening on ${chalk.hex('#eb6f92').bold('http://0.0.0.0:' + port + '/')}`));
await catalogAssets.sync()
app.listen({ port: port, host: "0.0.0.0" }).then(async () => {
console.log(
chalk.hex("#7967dd")(
`Server listening on ${chalk.hex("#eb6f92").bold("http://localhost:" + port + "/")}`
)
);
console.log(
chalk.hex("#7967dd")(
`Server also listening on ${chalk.hex("#eb6f92").bold("http://0.0.0.0:" + port + "/")}`
)
);
await catalogAssets.sync();
await setupDB(catalogAssets);
});

export { CatalogModel, Catalog }
export { CatalogModel, Catalog };

@@ -1,43 +1,43 @@
import { createServer } from 'node:http';
import wisp from 'wisp-server-node';
import rammerhead from '@rubynetwork/rammerhead';
import { FastifyServerFactory, FastifyServerFactoryHandler, RawServerDefault } from 'fastify';
import { parsedDoc } from './config.js';
import { LOG_LEVEL, WispOptions } from 'wisp-server-node/dist/Types.js';
import { createServer } from "node:http";
import rammerhead from "@rubynetwork/rammerhead";
import { FastifyServerFactory, FastifyServerFactoryHandler, RawServerDefault } from "fastify";
import wisp from "wisp-server-node";
import { LOG_LEVEL, WispOptions } from "wisp-server-node/dist/Types.js";
import { parsedDoc } from "./config.js";

const rh = rammerhead.createRammerhead({
logLevel: parsedDoc.server.server.logging ? 'debug' : 'disabled',
logLevel: parsedDoc.server.server.logging ? "debug" : "disabled",
reverseProxy: parsedDoc.server.rammerhead.reverseproxy,
disableLocalStorageSync: parsedDoc.server.rammerhead.localstorage_sync ? false : true,
disableHttp2: parsedDoc.server.rammerhead.http2 ? false : true
disableHttp2: parsedDoc.server.rammerhead.http2 ? false : true
});

const wispOptions: WispOptions = {
logLevel: parsedDoc.server.server.logging ? LOG_LEVEL.DEBUG : LOG_LEVEL.NONE,
pingInterval: 30
}
};

const serverFactory: FastifyServerFactory = (handler: FastifyServerFactoryHandler): RawServerDefault => {
const serverFactory: FastifyServerFactory = (
handler: FastifyServerFactoryHandler
): RawServerDefault => {
const httpServer = createServer();
httpServer.on('request', (req, res) => {
httpServer.on("request", (req, res) => {
if (rammerhead.shouldRouteRh(req)) {
rammerhead.routeRhRequest(rh, req, res);
}
else {
} else {
handler(req, res);
}
});
httpServer.on('upgrade', (req, socket, head) => {
httpServer.on("upgrade", (req, socket, head) => {
if (rammerhead.shouldRouteRh(req)) {
rammerhead.routeRhUpgrade(rh, req, socket, head);
}
else if (parsedDoc.server.server.wisp) {
if (req.url?.endsWith('/wisp/')) {
} else if (parsedDoc.server.server.wisp) {
if (req.url?.endsWith("/wisp/")) {
wisp.routeRequest(req, socket as any, head, wispOptions);
}
}
});
return httpServer;
}
};

export { serverFactory };

@@ -2,7 +2,7 @@
import { Image } from "astro:assets";
import { type ImageMetadata } from "astro";
const images = import.meta.glob<{ default: ImageMetadata }>(
'/src/assets/contribs/*.{jpeg,jpg,png,gif,webp}'
"/src/assets/contribs/*.{jpeg,jpg,png,gif,webp}"
);

interface Props {

@@ -2,8 +2,11 @@ function pageLoad(fn: () => void, logging?: boolean) {
document.addEventListener("astro:page-load", () => {
try {
fn();
} catch (err) {
if (logging) {
console.error(err);
}
}
catch (err) { if (logging) { console.error(err) } }
});
}