fix: cleanupStuckedContainers
This commit is contained in:
4
apps/trpc-experimental/server/.env-template
Normal file
4
apps/trpc-experimental/server/.env-template
Normal file
@@ -0,0 +1,4 @@
|
||||
NODE_ENV="development"
|
||||
|
||||
COOLIFY_DATABASE_URL="file:../db/dev.db"
|
||||
COOLIFY_SECRET_KEY="32-character-long-secret-key"
|
||||
9
apps/trpc-experimental/server/.prettierrc
Normal file
9
apps/trpc-experimental/server/.prettierrc
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"useTabs": true,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"printWidth": 100,
|
||||
"plugins": ["prettier-plugin-svelte"],
|
||||
"pluginSearchDirs": ["."],
|
||||
"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
|
||||
}
|
||||
31
apps/trpc-experimental/server/build/api/index.js
Normal file
31
apps/trpc-experimental/server/build/api/index.js
Normal file
@@ -0,0 +1,31 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var api_exports = {};
|
||||
__export(api_exports, {
|
||||
default: () => api_default
|
||||
});
|
||||
module.exports = __toCommonJS(api_exports);
|
||||
const root = async (fastify) => {
|
||||
fastify.get("/", async function(_request, _reply) {
|
||||
return { status: "ok" };
|
||||
});
|
||||
};
|
||||
var api_default = root;
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
32
apps/trpc-experimental/server/build/config.js
Normal file
32
apps/trpc-experimental/server/build/config.js
Normal file
@@ -0,0 +1,32 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var config_exports = {};
|
||||
__export(config_exports, {
|
||||
serverConfig: () => serverConfig
|
||||
});
|
||||
module.exports = __toCommonJS(config_exports);
|
||||
const serverConfig = {
|
||||
dev: false,
|
||||
port: 2022,
|
||||
prefix: "/trpc"
|
||||
};
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
serverConfig
|
||||
});
|
||||
18
apps/trpc-experimental/server/build/env.js
Normal file
18
apps/trpc-experimental/server/build/env.js
Normal file
@@ -0,0 +1,18 @@
|
||||
"use strict";
|
||||
const dotenv = require("dotenv");
|
||||
dotenv.config();
|
||||
const { z } = require("zod");
|
||||
const envSchema = z.object({
|
||||
CODESANDBOX_HOST: z.string().optional(),
|
||||
NODE_ENV: z.enum(["development", "test", "production"]),
|
||||
COOLIFY_DATABASE_URL: z.string(),
|
||||
COOLIFY_SECRET_KEY: z.string().length(32),
|
||||
COOLIFY_WHITE_LABELED: z.string().optional(),
|
||||
COOLIFY_WHITE_LABELED_ICON: z.string().optional()
|
||||
});
|
||||
const env = envSchema.safeParse(process.env);
|
||||
if (!env.success) {
|
||||
console.error("\u274C Invalid environment variables:", JSON.stringify(env.error.format(), null, 4));
|
||||
process.exit(1);
|
||||
}
|
||||
module.exports.env = env.data;
|
||||
5
apps/trpc-experimental/server/build/index.js
Normal file
5
apps/trpc-experimental/server/build/index.js
Normal file
@@ -0,0 +1,5 @@
|
||||
"use strict";
|
||||
var import_config = require("./config");
|
||||
var import_server = require("./server");
|
||||
const server = (0, import_server.createServer)(import_config.serverConfig);
|
||||
server.start();
|
||||
@@ -0,0 +1,813 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var import_node_worker_threads = require("node:worker_threads");
|
||||
var import_crypto = __toESM(require("crypto"));
|
||||
var import_promises = __toESM(require("fs/promises"));
|
||||
var import_js_yaml = __toESM(require("js-yaml"));
|
||||
var import_common = require("../lib/buildPacks/common");
|
||||
var import_common2 = require("../lib/common");
|
||||
var importers = __toESM(require("../lib/importers"));
|
||||
var buildpacks = __toESM(require("../lib/buildPacks"));
|
||||
var import_prisma = require("../prisma");
|
||||
var import_executeCommand = require("../lib/executeCommand");
|
||||
var import_docker = require("../lib/docker");
|
||||
(async () => {
|
||||
if (import_node_worker_threads.parentPort) {
|
||||
import_node_worker_threads.parentPort.on("message", async (message) => {
|
||||
if (message === "error")
|
||||
throw new Error("oops");
|
||||
if (message === "cancel") {
|
||||
import_node_worker_threads.parentPort.postMessage("cancelled");
|
||||
await import_prisma.prisma.$disconnect();
|
||||
process.exit(0);
|
||||
}
|
||||
});
|
||||
const pThrottle = await import("p-throttle");
|
||||
const throttle = pThrottle.default({
|
||||
limit: 1,
|
||||
interval: 2e3
|
||||
});
|
||||
const th = throttle(async () => {
|
||||
try {
|
||||
const queuedBuilds = await import_prisma.prisma.build.findMany({
|
||||
where: { status: { in: ["queued", "running"] } },
|
||||
orderBy: { createdAt: "asc" }
|
||||
});
|
||||
const { concurrentBuilds } = await import_prisma.prisma.setting.findFirst({});
|
||||
if (queuedBuilds.length > 0) {
|
||||
import_node_worker_threads.parentPort.postMessage({ deploying: true });
|
||||
const concurrency = concurrentBuilds;
|
||||
const pAll = await import("p-all");
|
||||
const actions = [];
|
||||
for (const queueBuild of queuedBuilds) {
|
||||
actions.push(async () => {
|
||||
let application = await import_prisma.prisma.application.findUnique({
|
||||
where: { id: queueBuild.applicationId },
|
||||
include: {
|
||||
dockerRegistry: true,
|
||||
destinationDocker: true,
|
||||
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||
persistentStorage: true,
|
||||
secrets: true,
|
||||
settings: true,
|
||||
teams: true
|
||||
}
|
||||
});
|
||||
let {
|
||||
id: buildId,
|
||||
type,
|
||||
gitSourceId,
|
||||
sourceBranch = null,
|
||||
pullmergeRequestId = null,
|
||||
previewApplicationId = null,
|
||||
forceRebuild,
|
||||
sourceRepository = null
|
||||
} = queueBuild;
|
||||
application = (0, import_common2.decryptApplication)(application);
|
||||
if (!gitSourceId && application.simpleDockerfile) {
|
||||
const {
|
||||
id: applicationId2,
|
||||
destinationDocker: destinationDocker2,
|
||||
destinationDockerId: destinationDockerId2,
|
||||
secrets: secrets2,
|
||||
port: port2,
|
||||
persistentStorage: persistentStorage2,
|
||||
exposePort: exposePort2,
|
||||
simpleDockerfile,
|
||||
dockerRegistry: dockerRegistry2
|
||||
} = application;
|
||||
const { workdir: workdir2 } = await (0, import_common2.createDirectories)({ repository: applicationId2, buildId });
|
||||
try {
|
||||
if (queueBuild.status === "running") {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: "Building halted, restarting...",
|
||||
buildId,
|
||||
applicationId: application.id
|
||||
});
|
||||
}
|
||||
const volumes = persistentStorage2?.map((storage) => {
|
||||
if (storage.oldPath) {
|
||||
return `${applicationId2}${storage.path.replace(/\//gi, "-").replace("-app", "")}:${storage.path}`;
|
||||
}
|
||||
return `${applicationId2}${storage.path.replace(/\//gi, "-")}:${storage.path}`;
|
||||
}) || [];
|
||||
if (destinationDockerId2) {
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: { status: "running" }
|
||||
});
|
||||
try {
|
||||
const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId2,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId2}' --format {{.ID}}`
|
||||
});
|
||||
if (containers) {
|
||||
const containerArray = containers.split("\n");
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId2,
|
||||
command: `docker stop -t 0 ${container}`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId2,
|
||||
command: `docker rm --force ${container}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
let envs = [];
|
||||
if (secrets2.length > 0) {
|
||||
envs = [
|
||||
...envs,
|
||||
...(0, import_common2.generateSecrets)(secrets2, pullmergeRequestId, false, port2)
|
||||
];
|
||||
}
|
||||
await import_promises.default.writeFile(`${workdir2}/Dockerfile`, simpleDockerfile);
|
||||
if (dockerRegistry2) {
|
||||
const { url, username, password } = dockerRegistry2;
|
||||
await (0, import_common.saveDockerRegistryCredentials)({ url, username, password, workdir: workdir2 });
|
||||
}
|
||||
const labels = (0, import_common.makeLabelForSimpleDockerfile)({
|
||||
applicationId: applicationId2,
|
||||
type,
|
||||
port: exposePort2 ? `${exposePort2}:${port2}` : port2
|
||||
});
|
||||
try {
|
||||
const composeVolumes = volumes.map((volume) => {
|
||||
return {
|
||||
[`${volume.split(":")[0]}`]: {
|
||||
name: volume.split(":")[0]
|
||||
}
|
||||
};
|
||||
});
|
||||
const composeFile = {
|
||||
version: "3.8",
|
||||
services: {
|
||||
[applicationId2]: {
|
||||
build: {
|
||||
context: workdir2
|
||||
},
|
||||
image: `${applicationId2}:${buildId}`,
|
||||
container_name: applicationId2,
|
||||
volumes,
|
||||
labels,
|
||||
environment: envs,
|
||||
depends_on: [],
|
||||
expose: [port2],
|
||||
...exposePort2 ? { ports: [`${exposePort2}:${port2}`] } : {},
|
||||
...(0, import_docker.defaultComposeConfiguration)(destinationDocker2.network)
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[destinationDocker2.network]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: Object.assign({}, ...composeVolumes)
|
||||
};
|
||||
await import_promises.default.writeFile(`${workdir2}/docker-compose.yml`, import_js_yaml.default.dump(composeFile));
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
debug: true,
|
||||
dockerId: destinationDocker2.id,
|
||||
command: `docker compose --project-directory ${workdir2} up -d`
|
||||
});
|
||||
await (0, import_common.saveBuildLog)({ line: "Deployed \u{1F389}", buildId, applicationId: applicationId2 });
|
||||
} catch (error) {
|
||||
await (0, import_common.saveBuildLog)({ line: error, buildId, applicationId: applicationId2 });
|
||||
const foundBuild = await import_prisma.prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (foundBuild) {
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: {
|
||||
status: "failed"
|
||||
}
|
||||
});
|
||||
}
|
||||
throw new Error(error);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const foundBuild = await import_prisma.prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (foundBuild) {
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: {
|
||||
status: "failed"
|
||||
}
|
||||
});
|
||||
}
|
||||
if (error !== 1) {
|
||||
await (0, import_common.saveBuildLog)({ line: error, buildId, applicationId: application.id });
|
||||
}
|
||||
if (error instanceof Error) {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: error.message,
|
||||
buildId,
|
||||
applicationId: application.id
|
||||
});
|
||||
}
|
||||
await import_promises.default.rm(workdir2, { recursive: true, force: true });
|
||||
return;
|
||||
}
|
||||
try {
|
||||
if (application.dockerRegistryImageName) {
|
||||
const customTag2 = application.dockerRegistryImageName.split(":")[1] || buildId;
|
||||
const imageName2 = application.dockerRegistryImageName.split(":")[0];
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: `Pushing ${imageName2}:${customTag2} to Docker Registry... It could take a while...`,
|
||||
buildId,
|
||||
applicationId: application.id
|
||||
});
|
||||
await (0, import_common2.pushToRegistry)(application, workdir2, buildId, imageName2, customTag2);
|
||||
await (0, import_common.saveBuildLog)({ line: "Success", buildId, applicationId: application.id });
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.stdout) {
|
||||
await (0, import_common.saveBuildLog)({ line: error.stdout, buildId, applicationId: applicationId2 });
|
||||
}
|
||||
if (error.stderr) {
|
||||
await (0, import_common.saveBuildLog)({ line: error.stderr, buildId, applicationId: applicationId2 });
|
||||
}
|
||||
} finally {
|
||||
await import_promises.default.rm(workdir2, { recursive: true, force: true });
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: { status: "success" }
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
const originalApplicationId = application.id;
|
||||
const {
|
||||
id: applicationId,
|
||||
name,
|
||||
destinationDocker,
|
||||
destinationDockerId,
|
||||
gitSource,
|
||||
configHash,
|
||||
fqdn,
|
||||
projectId,
|
||||
secrets,
|
||||
phpModules,
|
||||
settings,
|
||||
persistentStorage,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
denoOptions,
|
||||
exposePort,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType,
|
||||
gitCommitHash,
|
||||
dockerRegistry
|
||||
} = application;
|
||||
let {
|
||||
branch,
|
||||
repository,
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
publishDirectory,
|
||||
dockerFileLocation,
|
||||
dockerComposeFileLocation,
|
||||
dockerComposeConfiguration,
|
||||
denoMainFile
|
||||
} = application;
|
||||
let imageId = applicationId;
|
||||
let domain = (0, import_common2.getDomain)(fqdn);
|
||||
let location = null;
|
||||
let tag = null;
|
||||
let customTag = null;
|
||||
let imageName = null;
|
||||
let imageFoundLocally = false;
|
||||
let imageFoundRemotely = false;
|
||||
if (pullmergeRequestId) {
|
||||
const previewApplications = await import_prisma.prisma.previewApplication.findMany({
|
||||
where: { applicationId: originalApplicationId, pullmergeRequestId }
|
||||
});
|
||||
if (previewApplications.length > 0) {
|
||||
previewApplicationId = previewApplications[0].id;
|
||||
}
|
||||
branch = sourceBranch;
|
||||
domain = `${pullmergeRequestId}.${domain}`;
|
||||
imageId = `${applicationId}-${pullmergeRequestId}`;
|
||||
repository = sourceRepository || repository;
|
||||
}
|
||||
const { workdir, repodir } = await (0, import_common2.createDirectories)({ repository, buildId });
|
||||
try {
|
||||
if (queueBuild.status === "running") {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: "Building halted, restarting...",
|
||||
buildId,
|
||||
applicationId: application.id
|
||||
});
|
||||
}
|
||||
const currentHash = import_crypto.default.createHash("sha256").update(
|
||||
JSON.stringify({
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
deploymentType,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
buildPack,
|
||||
port,
|
||||
exposePort,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
secrets,
|
||||
branch,
|
||||
repository,
|
||||
fqdn
|
||||
})
|
||||
).digest("hex");
|
||||
const { debug } = settings;
|
||||
if (!debug) {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: `Debug logging is disabled. Enable it above if necessary!`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
}
|
||||
const volumes = persistentStorage?.map((storage) => {
|
||||
if (storage.oldPath) {
|
||||
return `${applicationId}${storage.path.replace(/\//gi, "-").replace("-app", "")}:${storage.path}`;
|
||||
}
|
||||
return `${applicationId}${storage.path.replace(/\//gi, "-")}:${storage.path}`;
|
||||
}) || [];
|
||||
try {
|
||||
dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration);
|
||||
} catch (error) {
|
||||
}
|
||||
let deployNeeded = true;
|
||||
let destinationType;
|
||||
if (destinationDockerId) {
|
||||
destinationType = "docker";
|
||||
}
|
||||
if (destinationType === "docker") {
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: { status: "running" }
|
||||
});
|
||||
const configuration = await (0, import_common.setDefaultConfiguration)(application);
|
||||
buildPack = configuration.buildPack;
|
||||
port = configuration.port;
|
||||
installCommand = configuration.installCommand;
|
||||
startCommand = configuration.startCommand;
|
||||
buildCommand = configuration.buildCommand;
|
||||
publishDirectory = configuration.publishDirectory;
|
||||
baseDirectory = configuration.baseDirectory || "";
|
||||
dockerFileLocation = configuration.dockerFileLocation;
|
||||
dockerComposeFileLocation = configuration.dockerComposeFileLocation;
|
||||
denoMainFile = configuration.denoMainFile;
|
||||
const commit = await importers[gitSource.type]({
|
||||
applicationId,
|
||||
debug,
|
||||
workdir,
|
||||
repodir,
|
||||
githubAppId: gitSource.githubApp?.id,
|
||||
gitlabAppId: gitSource.gitlabApp?.id,
|
||||
customPort: gitSource.customPort,
|
||||
gitCommitHash,
|
||||
configuration,
|
||||
repository,
|
||||
branch,
|
||||
buildId,
|
||||
apiUrl: gitSource.apiUrl,
|
||||
htmlUrl: gitSource.htmlUrl,
|
||||
projectId,
|
||||
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
|
||||
privateSshKey: (0, import_common2.decrypt)(gitSource.gitlabApp?.privateSshKey) || null,
|
||||
forPublic: gitSource.forPublic
|
||||
});
|
||||
if (!commit) {
|
||||
throw new Error("No commit found?");
|
||||
}
|
||||
tag = commit.slice(0, 7);
|
||||
if (pullmergeRequestId) {
|
||||
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
|
||||
}
|
||||
if (application.dockerRegistryImageName) {
|
||||
imageName = application.dockerRegistryImageName.split(":")[0];
|
||||
customTag = application.dockerRegistryImageName.split(":")[1] || tag;
|
||||
} else {
|
||||
customTag = tag;
|
||||
imageName = applicationId;
|
||||
}
|
||||
if (pullmergeRequestId) {
|
||||
customTag = `${customTag}-${pullmergeRequestId}`;
|
||||
}
|
||||
try {
|
||||
await import_prisma.prisma.build.update({ where: { id: buildId }, data: { commit } });
|
||||
} catch (err) {
|
||||
}
|
||||
if (!pullmergeRequestId) {
|
||||
if (configHash !== currentHash) {
|
||||
deployNeeded = true;
|
||||
if (configHash) {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: "Configuration changed",
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
}
|
||||
} else {
|
||||
deployNeeded = false;
|
||||
}
|
||||
} else {
|
||||
deployNeeded = true;
|
||||
}
|
||||
try {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker image inspect ${applicationId}:${tag}`
|
||||
});
|
||||
imageFoundLocally = true;
|
||||
} catch (error) {
|
||||
}
|
||||
if (dockerRegistry) {
|
||||
const { url, username, password } = dockerRegistry;
|
||||
location = await (0, import_common.saveDockerRegistryCredentials)({
|
||||
url,
|
||||
username,
|
||||
password,
|
||||
workdir
|
||||
});
|
||||
}
|
||||
try {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker ${location ? `--config ${location}` : ""} pull ${imageName}:${customTag}`
|
||||
});
|
||||
imageFoundRemotely = true;
|
||||
} catch (error) {
|
||||
}
|
||||
let imageFound = `${applicationId}:${tag}`;
|
||||
if (imageFoundRemotely) {
|
||||
imageFound = `${imageName}:${customTag}`;
|
||||
}
|
||||
await (0, import_common.copyBaseConfigurationFiles)(
|
||||
buildPack,
|
||||
workdir,
|
||||
buildId,
|
||||
applicationId,
|
||||
baseImage
|
||||
);
|
||||
const labels = (0, import_common.makeLabelForStandaloneApplication)({
|
||||
applicationId,
|
||||
fqdn,
|
||||
name,
|
||||
type,
|
||||
pullmergeRequestId,
|
||||
buildPack,
|
||||
repository,
|
||||
branch,
|
||||
projectId,
|
||||
port: exposePort ? `${exposePort}:${port}` : port,
|
||||
commit,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
publishDirectory
|
||||
});
|
||||
if (forceRebuild)
|
||||
deployNeeded = true;
|
||||
if (!imageFoundLocally && !imageFoundRemotely || deployNeeded) {
|
||||
if (buildPack === "static") {
|
||||
await buildpacks.staticApp({
|
||||
dockerId: destinationDocker.id,
|
||||
network: destinationDocker.network,
|
||||
buildId,
|
||||
applicationId,
|
||||
domain,
|
||||
name,
|
||||
type,
|
||||
volumes,
|
||||
labels,
|
||||
pullmergeRequestId,
|
||||
buildPack,
|
||||
repository,
|
||||
branch,
|
||||
projectId,
|
||||
publishDirectory,
|
||||
debug,
|
||||
commit,
|
||||
tag,
|
||||
workdir,
|
||||
port: exposePort ? `${exposePort}:${port}` : port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
phpModules,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
dockerFileLocation,
|
||||
dockerComposeConfiguration,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType,
|
||||
forceRebuild
|
||||
});
|
||||
} else if (buildpacks[buildPack])
|
||||
await buildpacks[buildPack]({
|
||||
dockerId: destinationDocker.id,
|
||||
network: destinationDocker.network,
|
||||
buildId,
|
||||
applicationId,
|
||||
domain,
|
||||
name,
|
||||
type,
|
||||
volumes,
|
||||
labels,
|
||||
pullmergeRequestId,
|
||||
buildPack,
|
||||
repository,
|
||||
branch,
|
||||
projectId,
|
||||
publishDirectory,
|
||||
debug,
|
||||
commit,
|
||||
tag,
|
||||
workdir,
|
||||
port: exposePort ? `${exposePort}:${port}` : port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
phpModules,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
dockerFileLocation,
|
||||
dockerComposeConfiguration,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType,
|
||||
forceRebuild
|
||||
});
|
||||
else {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: `Build pack ${buildPack} not found`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
throw new Error(`Build pack ${buildPack} not found.`);
|
||||
}
|
||||
} else {
|
||||
if (imageFoundRemotely || deployNeeded) {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: `Container image ${imageFound} found in Docker Registry - reuising it`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
} else {
|
||||
if (imageFoundLocally || deployNeeded) {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: `Container image ${imageFound} found locally - reuising it`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if (buildPack === "compose") {
|
||||
try {
|
||||
const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}`
|
||||
});
|
||||
if (containers) {
|
||||
const containerArray = containers.split("\n");
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker stop -t 0 ${container}`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker rm --force ${container}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
try {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
debug,
|
||||
buildId,
|
||||
applicationId,
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker compose --project-directory ${workdir} up -d`
|
||||
});
|
||||
await (0, import_common.saveBuildLog)({ line: "Deployed \u{1F389}", buildId, applicationId });
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: { status: "success" }
|
||||
});
|
||||
await import_prisma.prisma.application.update({
|
||||
where: { id: applicationId },
|
||||
data: { configHash: currentHash }
|
||||
});
|
||||
} catch (error) {
|
||||
await (0, import_common.saveBuildLog)({ line: error, buildId, applicationId });
|
||||
const foundBuild = await import_prisma.prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (foundBuild) {
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: {
|
||||
status: "failed"
|
||||
}
|
||||
});
|
||||
}
|
||||
throw new Error(error);
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}`
|
||||
});
|
||||
if (containers) {
|
||||
const containerArray = containers.split("\n");
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker stop -t 0 ${container}`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker rm --force ${container}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
let envs = [];
|
||||
if (secrets.length > 0) {
|
||||
envs = [
|
||||
...envs,
|
||||
...(0, import_common2.generateSecrets)(secrets, pullmergeRequestId, false, port)
|
||||
];
|
||||
}
|
||||
if (dockerRegistry) {
|
||||
const { url, username, password } = dockerRegistry;
|
||||
await (0, import_common.saveDockerRegistryCredentials)({ url, username, password, workdir });
|
||||
}
|
||||
try {
|
||||
const composeVolumes = volumes.map((volume) => {
|
||||
return {
|
||||
[`${volume.split(":")[0]}`]: {
|
||||
name: volume.split(":")[0]
|
||||
}
|
||||
};
|
||||
});
|
||||
const composeFile = {
|
||||
version: "3.8",
|
||||
services: {
|
||||
[imageId]: {
|
||||
image: imageFound,
|
||||
container_name: imageId,
|
||||
volumes,
|
||||
environment: envs,
|
||||
labels,
|
||||
depends_on: [],
|
||||
expose: [port],
|
||||
...exposePort ? { ports: [`${exposePort}:${port}`] } : {},
|
||||
...(0, import_docker.defaultComposeConfiguration)(destinationDocker.network)
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[destinationDocker.network]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: Object.assign({}, ...composeVolumes)
|
||||
};
|
||||
await import_promises.default.writeFile(`${workdir}/docker-compose.yml`, import_js_yaml.default.dump(composeFile));
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
debug,
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker compose --project-directory ${workdir} up -d`
|
||||
});
|
||||
await (0, import_common.saveBuildLog)({ line: "Deployed \u{1F389}", buildId, applicationId });
|
||||
} catch (error) {
|
||||
await (0, import_common.saveBuildLog)({ line: error, buildId, applicationId });
|
||||
const foundBuild = await import_prisma.prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (foundBuild) {
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: {
|
||||
status: "failed"
|
||||
}
|
||||
});
|
||||
}
|
||||
throw new Error(error);
|
||||
}
|
||||
if (!pullmergeRequestId)
|
||||
await import_prisma.prisma.application.update({
|
||||
where: { id: applicationId },
|
||||
data: { configHash: currentHash }
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const foundBuild = await import_prisma.prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (foundBuild) {
|
||||
await import_prisma.prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: {
|
||||
status: "failed"
|
||||
}
|
||||
});
|
||||
}
|
||||
if (error !== 1) {
|
||||
await (0, import_common.saveBuildLog)({ line: error, buildId, applicationId: application.id });
|
||||
}
|
||||
if (error instanceof Error) {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: error.message,
|
||||
buildId,
|
||||
applicationId: application.id
|
||||
});
|
||||
}
|
||||
await import_promises.default.rm(workdir, { recursive: true, force: true });
|
||||
return;
|
||||
}
|
||||
try {
|
||||
if (application.dockerRegistryImageName && (!imageFoundRemotely || forceRebuild)) {
|
||||
await (0, import_common.saveBuildLog)({
|
||||
line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`,
|
||||
buildId,
|
||||
applicationId: application.id
|
||||
});
|
||||
await (0, import_common2.pushToRegistry)(application, workdir, tag, imageName, customTag);
|
||||
await (0, import_common.saveBuildLog)({ line: "Success", buildId, applicationId: application.id });
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.stdout) {
|
||||
await (0, import_common.saveBuildLog)({ line: error.stdout, buildId, applicationId });
|
||||
}
|
||||
if (error.stderr) {
|
||||
await (0, import_common.saveBuildLog)({ line: error.stderr, buildId, applicationId });
|
||||
}
|
||||
} finally {
|
||||
await import_promises.default.rm(workdir, { recursive: true, force: true });
|
||||
await import_prisma.prisma.build.update({ where: { id: buildId }, data: { status: "success" } });
|
||||
}
|
||||
});
|
||||
}
|
||||
await pAll.default(actions, { concurrency });
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
});
|
||||
while (true) {
|
||||
await th();
|
||||
}
|
||||
} else {
|
||||
console.log("hello");
|
||||
process.exit(0);
|
||||
}
|
||||
})();
|
||||
842
apps/trpc-experimental/server/build/lib/buildPacks/common.js
Normal file
842
apps/trpc-experimental/server/build/lib/buildPacks/common.js
Normal file
@@ -0,0 +1,842 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var common_exports = {};
|
||||
__export(common_exports, {
|
||||
buildCacheImageForLaravel: () => buildCacheImageForLaravel,
|
||||
buildCacheImageWithCargo: () => buildCacheImageWithCargo,
|
||||
buildCacheImageWithNode: () => buildCacheImageWithNode,
|
||||
buildImage: () => buildImage,
|
||||
checkPnpm: () => checkPnpm,
|
||||
copyBaseConfigurationFiles: () => copyBaseConfigurationFiles,
|
||||
makeLabelForSimpleDockerfile: () => makeLabelForSimpleDockerfile,
|
||||
makeLabelForStandaloneApplication: () => makeLabelForStandaloneApplication,
|
||||
saveBuildLog: () => saveBuildLog,
|
||||
saveDockerRegistryCredentials: () => saveDockerRegistryCredentials,
|
||||
scanningTemplates: () => scanningTemplates,
|
||||
setDefaultBaseImage: () => setDefaultBaseImage,
|
||||
setDefaultConfiguration: () => setDefaultConfiguration
|
||||
});
|
||||
module.exports = __toCommonJS(common_exports);
|
||||
var import_common = require("../common");
|
||||
var import_fs = require("fs");
|
||||
var import_dayjs = require("../dayjs");
|
||||
var import_prisma = require("../../prisma");
|
||||
var import_executeCommand = require("../executeCommand");
|
||||
// Build packs that serve static output, and those whose build stage runs on Node.js.
const staticApps = ["static", "react", "vuejs", "svelte", "gatsby", "astro", "eleventy"];
const nodeBased = [
  "react",
  "preact",
  "vuejs",
  "svelte",
  "gatsby",
  "astro",
  "eleventy",
  "node",
  "nestjs",
  "nuxtjs",
  "nextjs"
];
/**
 * Resolves the default runtime/build base images (and the selectable lists)
 * for a given build pack. Returns { baseImage, baseBuildImage, baseImages,
 * baseBuildImages } where the list entries have the { value, label } shape
 * the UI select boxes expect.
 */
function setDefaultBaseImage(buildPack, deploymentType = null) {
  // Helper producing a select-box entry; label defaults to the image tag.
  const image = (value, label = value) => ({ value, label });

  const nodeVersions = ["node:lts", "node:18", "node:17", "node:16", "node:14", "node:12"].map(
    (t) => image(t)
  );
  const staticVersions = [
    image("webdevops/nginx:alpine"),
    image("webdevops/apache:alpine"),
    image("nginx:alpine"),
    image("httpd:alpine", "httpd:alpine (Apache)")
  ];
  const rustVersions = [
    "rust:latest",
    "rust:1.60",
    "rust:1.60-buster",
    "rust:1.60-bullseye",
    "rust:1.60-slim-buster",
    "rust:1.60-slim-bullseye",
    "rust:1.60-alpine3.14",
    "rust:1.60-alpine3.15"
  ].map((t) => image(t));
  // PHP images come in apache/nginx pairs per version; the alpine list is
  // asymmetric (it ends at apache:7.1-alpine with no nginx counterpart).
  const phpPair = (v) => [image(`webdevops/php-apache:${v}`), image(`webdevops/php-nginx:${v}`)];
  const phpVersions = [
    ...["8.2", "8.1", "8.0", "7.4", "7.3", "7.2", "7.1", "7.0", "5.6"].flatMap(phpPair),
    ...["8.2-alpine", "8.1-alpine", "8.0-alpine", "7.4-alpine", "7.3-alpine", "7.2-alpine"].flatMap(
      phpPair
    ),
    image("webdevops/php-apache:7.1-alpine"),
    image("php:8.1-fpm"),
    image("php:8.0-fpm"),
    image("php:8.1-fpm-alpine"),
    image("php:8.0-fpm-alpine")
  ];
  const pythonVersions = ["3.10", "3.9", "3.8", "3.7"].flatMap((v) =>
    ["alpine", "buster", "bullseye", "slim-bullseye"].map((flavor) => image(`python:${v}-${flavor}`))
  );
  const herokuVersions = ["heroku/builder:22", "heroku/buildpacks:20", "heroku/builder-classic:22"].map(
    (t) => image(t)
  );

  // webdevops images are not published for ARM; filter them out on that arch.
  const withoutWebdevops = (versions) =>
    versions.filter((version2) => !version2.value.includes("webdevops"));

  let payload = {
    baseImage: null,
    baseBuildImage: null,
    baseImages: [],
    baseBuildImages: []
  };
  if (nodeBased.includes(buildPack)) {
    if (deploymentType === "static") {
      // Static deployments of node apps build with node and serve with nginx.
      payload.baseImage = (0, import_common.isARM)(process.arch) ? "nginx:alpine" : "webdevops/nginx:alpine";
      payload.baseImages = (0, import_common.isARM)(process.arch) ? withoutWebdevops(staticVersions) : staticVersions;
      payload.baseBuildImage = "node:lts";
      payload.baseBuildImages = nodeVersions;
    } else {
      payload.baseImage = "node:lts";
      payload.baseImages = nodeVersions;
      payload.baseBuildImage = "node:lts";
      payload.baseBuildImages = nodeVersions;
    }
  }
  if (staticApps.includes(buildPack)) {
    payload.baseImage = (0, import_common.isARM)(process.arch) ? "nginx:alpine" : "webdevops/nginx:alpine";
    payload.baseImages = (0, import_common.isARM)(process.arch) ? withoutWebdevops(staticVersions) : staticVersions;
    payload.baseBuildImage = "node:lts";
    payload.baseBuildImages = nodeVersions;
  }
  if (buildPack === "python") {
    payload.baseImage = "python:3.10-alpine";
    payload.baseImages = pythonVersions;
  }
  if (buildPack === "rust") {
    payload.baseImage = "rust:latest";
    payload.baseBuildImage = "rust:latest";
    payload.baseImages = rustVersions;
    payload.baseBuildImages = rustVersions;
  }
  if (buildPack === "deno") {
    payload.baseImage = "denoland/deno:latest";
  }
  if (buildPack === "php") {
    payload.baseImage = (0, import_common.isARM)(process.arch) ? "php:8.1-fpm-alpine" : "webdevops/php-apache:8.2-alpine";
    payload.baseImages = (0, import_common.isARM)(process.arch) ? withoutWebdevops(phpVersions) : phpVersions;
  }
  if (buildPack === "laravel") {
    payload.baseImage = (0, import_common.isARM)(process.arch) ? "php:8.1-fpm-alpine" : "webdevops/php-apache:8.2-alpine";
    payload.baseImages = (0, import_common.isARM)(process.arch) ? withoutWebdevops(phpVersions) : phpVersions;
    payload.baseBuildImage = "node:18";
    payload.baseBuildImages = nodeVersions;
  }
  if (buildPack === "heroku") {
    payload.baseImage = "heroku/buildpacks:20";
    payload.baseImages = herokuVersions;
  }
  return payload;
}
|
||||
/**
 * Fills in every unset field of an application configuration with a sensible
 * default (port, install/build/start commands, publish directory, normalized
 * paths). Fields the caller already set are left untouched.
 */
const setDefaultConfiguration = async (data) => {
  let {
    buildPack,
    port,
    installCommand,
    startCommand,
    buildCommand,
    publishDirectory,
    baseDirectory,
    dockerFileLocation,
    dockerComposeFileLocation,
    denoMainFile
  } = data;
  const template = scanningTemplates[buildPack];

  if (!port) {
    // Build-pack-specific ports win over whatever the scan template suggests.
    const portByBuildPack = { static: 80, node: 3000, php: 80, python: 8000 };
    port = portByBuildPack[buildPack] ?? (template?.port || 3000);
  }

  // Static and Laravel packs manage their own pipeline; don't inject yarn defaults.
  const usesCommandDefaults = buildPack !== "static" && buildPack !== "laravel";
  if (!installCommand && usesCommandDefaults) {
    installCommand = template?.installCommand || "yarn install";
  }
  if (!startCommand && usesCommandDefaults) {
    startCommand = template?.startCommand || "yarn start";
  }
  if (!buildCommand && usesCommandDefaults) {
    buildCommand = template?.buildCommand || null;
  }
  if (!publishDirectory) {
    publishDirectory = template?.publishDirectory || null;
  }

  if (baseDirectory) {
    // Normalize to a leading slash and no trailing slash (except bare "/").
    if (!baseDirectory.startsWith("/")) baseDirectory = `/${baseDirectory}`;
    if (baseDirectory !== "/" && baseDirectory.endsWith("/")) {
      baseDirectory = baseDirectory.slice(0, -1);
    }
  }

  // Shared normalizer for file locations: leading slash added, trailing slash
  // stripped unconditionally, falling back to the given default when unset.
  const normalizeFileLocation = (location, fallback) => {
    if (!location) return fallback;
    const withLeadingSlash = location.startsWith("/") ? location : `/${location}`;
    return withLeadingSlash.endsWith("/") ? withLeadingSlash.slice(0, -1) : withLeadingSlash;
  };
  dockerFileLocation = normalizeFileLocation(dockerFileLocation, "/Dockerfile");
  // NOTE(review): the original fallback for a compose file is also "/Dockerfile";
  // looks like it should be a compose filename — preserved as-is, confirm upstream.
  dockerComposeFileLocation = normalizeFileLocation(dockerComposeFileLocation, "/Dockerfile");

  if (!denoMainFile) {
    denoMainFile = "main.ts";
  }

  return {
    buildPack,
    port,
    installCommand,
    startCommand,
    buildCommand,
    publishDirectory,
    baseDirectory,
    dockerFileLocation,
    dockerComposeFileLocation,
    denoMainFile
  };
};
// Maps a detected package.json dependency to the build pack it implies.
const scanningTemplates = {
  "@sveltejs/kit": { buildPack: "nodejs" },
  astro: { buildPack: "astro" },
  "@11ty/eleventy": { buildPack: "eleventy" },
  svelte: { buildPack: "svelte" },
  "@nestjs/core": { buildPack: "nestjs" },
  next: { buildPack: "nextjs" },
  nuxt: { buildPack: "nuxtjs" },
  "react-scripts": { buildPack: "react" },
  "parcel-bundler": { buildPack: "static" },
  "@vue/cli-service": { buildPack: "vuejs" },
  vuejs: { buildPack: "vuejs" },
  gatsby: { buildPack: "gatsby" },
  "preact-cli": { buildPack: "react" }
};
|
||||
/**
 * Persists one build-log line: redacts GitHub server tokens, timestamps the
 * line, ships it to fluentbit, and falls back to the database when fluentbit
 * is unreachable. Silently returns when ids are missing (or arrived as the
 * strings "undefined"/"null" over the wire).
 */
const saveBuildLog = async ({
  line,
  buildId,
  applicationId
}) => {
  if (buildId === "undefined" || buildId === "null" || !buildId)
    return;
  if (applicationId === "undefined" || applicationId === "null" || !applicationId)
    return;
  const { default: got } = await import("got");
  if (typeof line === "object" && line) {
    // Execa-style errors carry shortMessage/stderr; anything else is serialized.
    if (line.shortMessage) {
      line = line.shortMessage + "\n" + line.stderr;
    } else {
      line = JSON.stringify(line);
    }
  }
  if (line && typeof line === "string" && line.includes("ghs_")) {
    // FIX: /ghs_.*@/g was greedy and redacted everything up to the LAST "@"
    // on the line, destroying legitimate log content after the token. Stop at
    // the first "@" so only the token itself is removed.
    const regex = /ghs_[^@]*@/g;
    line = line.replace(regex, "<SENSITIVE_DATA_DELETED>@");
  }
  const addTimestamp = `[${(0, import_common.generateTimestamp)()}] ${line}`;
  // In dev outside the coolify container, fluentbit is reachable on localhost.
  const fluentBitUrl = import_common.isDev ? process.env.COOLIFY_CONTAINER_DEV === "true" ? "http://coolify-fluentbit:24224" : "http://localhost:24224" : "http://coolify-fluentbit:24224";
  if (import_common.isDev && !process.env.COOLIFY_CONTAINER_DEV) {
    console.debug(`[${applicationId}] ${addTimestamp}`);
  }
  try {
    return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
      json: {
        line: (0, import_common.encrypt)(line)
      }
    });
  } catch (error) {
    // Fluentbit unreachable — persist the log line in the database instead.
    return await import_prisma.prisma.buildLog.create({
      data: {
        line: addTimestamp,
        buildId,
        time: Number((0, import_dayjs.day)().valueOf()),
        applicationId
      }
    });
  }
};
|
||||
/**
 * Writes the build pack's default configuration file into the working
 * directory: a chown entrypoint for PHP images, or a default nginx.conf when
 * the base image is nginx-flavored. Other build packs get nothing.
 * Any filesystem error is rethrown wrapped in Error.
 */
async function copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage) {
  try {
    if (buildPack === "php") {
      // PHP containers run as uid 1000; make /app writable for it at startup.
      await import_fs.promises.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
      await saveBuildLog({
        line: "Copied default configuration file for PHP.",
        buildId,
        applicationId
      });
    } else if (baseImage?.includes("nginx")) {
      // SPA-friendly nginx config: logs to docker stdout, serves /app, and
      // falls back to /index.html for client-side routing.
      await import_fs.promises.writeFile(
        `${workdir}/nginx.conf`,
        `user  nginx;
worker_processes  auto;

error_log  /docker.stdout;
pid        /run/nginx.pid;

events {
    worker_connections  1024;
}

http {
    log_format  main  '$remote_addr - $remote_user [$time_local] "$request" '
                      '$status $body_bytes_sent "$http_referer" '
                      '"$http_user_agent" "$http_x_forwarded_for"';

    access_log  /docker.stdout main;

    sendfile            on;
    tcp_nopush          on;
    tcp_nodelay         on;
    keepalive_timeout   65;
    types_hash_max_size 2048;

    include             /etc/nginx/mime.types;
    default_type        application/octet-stream;

    server {
        listen       80;
        server_name  localhost;

        location / {
            root   /app;
            index  index.html;
            try_files $uri $uri/index.html $uri/ /index.html =404;
        }

        error_page  404              /50x.html;

        # redirect server error pages to the static page /50x.html
        #
        error_page   500 502 503 504  /50x.html;
        location = /50x.html {
            root   /app;
        }

    }

}
`
      );
    }
  } catch (error) {
    throw new Error(error);
  }
}
|
||||
/**
 * Reports whether any of the configured commands invokes pnpm. Mirrors the
 * short-circuit of `a || b || c`: truthy as soon as one command mentions
 * "pnpm", otherwise the last command's (possibly undefined) check result.
 */
function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
  const usesPnpm = (command) => command?.includes("pnpm");
  return usesPnpm(installCommand) || usesPnpm(buildCommand) || usesPnpm(startCommand);
}
|
||||
/**
 * Writes a docker CLI config.json with registry credentials into
 * `<workdir>/.docker` and returns that directory (for `docker --config`).
 * Returns null when no full credential pair is available.
 */
async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
  if (!username || !password) {
    return null;
  }
  const decryptedPassword = (0, import_common.decrypt)(password);
  const location = `${workdir}/.docker`;
  try {
    await import_fs.promises.mkdir(`${workdir}/.docker`);
  } catch (error) {
    // Directory already exists — safe to reuse.
  }
  const auth = Buffer.from(`${username}:${decryptedPassword}`).toString("base64");
  const dockerConfig = JSON.stringify({
    auths: {
      [url]: { auth }
    }
  });
  await import_fs.promises.writeFile(`${location}/config.json`, dockerConfig);
  return location;
}
|
||||
/**
 * Runs `docker build` for an application (production or cache image), wiring
 * in registry credentials when configured, and aborts with an error if the
 * build record was canceled while the build ran.
 */
async function buildImage({
  applicationId,
  tag,
  workdir,
  buildId,
  dockerId,
  isCache = false,
  debug = false,
  dockerFileLocation = "/Dockerfile",
  commit,
  forceRebuild = false
}) {
  const logLine = isCache ? `Building cache image...` : `Building production image...`;
  await saveBuildLog({ line: logLine, buildId, applicationId });
  // Cache builds use a "-cache" suffixed Dockerfile and image tag.
  const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`;
  const cache = `${applicationId}:${tag}${isCache ? "-cache" : ""}`;
  // Registry credentials (if any) are written to a workdir-local docker config.
  let location = null;
  const { dockerRegistry } = await import_prisma.prisma.application.findUnique({
    where: { id: applicationId },
    select: { dockerRegistry: true }
  });
  if (dockerRegistry) {
    const { url, username, password } = dockerRegistry;
    location = await saveDockerRegistryCredentials({ url, username, password, workdir });
  }
  await (0, import_executeCommand.executeCommand)({
    stream: true,
    debug,
    buildId,
    applicationId,
    dockerId,
    command: `docker ${location ? `--config ${location}` : ""} build ${forceRebuild ? "--no-cache" : ""} --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
  });
  // The build may have been canceled from the UI while docker was running.
  const { status } = await import_prisma.prisma.build.findUnique({ where: { id: buildId } });
  if (status === "canceled") {
    throw new Error("Canceled.");
  }
}
|
||||
/**
 * Builds the minimal coolify container labels for a plain-Dockerfile app.
 * `port` and `type` are accepted for interface parity but not encoded.
 */
function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
  const managedLabels = ["coolify.managed=true", `coolify.version=${import_common.version}`];
  const applicationLabels = [
    `coolify.applicationId=${applicationId}`,
    `coolify.type=standalone-application`
  ];
  return [...managedLabels, ...applicationLabels];
}
|
||||
/**
 * Builds the coolify container labels for a standalone application,
 * embedding the full deployment configuration base64-encoded in a label.
 * Preview (PR/MR) deployments get a PR-scoped subdomain on the app's domain.
 */
function makeLabelForStandaloneApplication({
  applicationId,
  fqdn,
  name,
  type,
  pullmergeRequestId = null,
  buildPack,
  repository,
  branch,
  projectId,
  port,
  commit,
  installCommand,
  buildCommand,
  startCommand,
  baseDirectory,
  publishDirectory
}) {
  if (pullmergeRequestId) {
    const protocol = fqdn.startsWith("https://") ? "https" : "http";
    fqdn = `${protocol}://${pullmergeRequestId}.${(0, import_common.getDomain)(fqdn)}`;
  }
  // Key order matters: it defines the JSON layout stored in the label.
  const configuration = {
    applicationId,
    fqdn,
    name,
    type,
    pullmergeRequestId,
    buildPack,
    repository,
    branch,
    projectId,
    port,
    commit,
    installCommand,
    buildCommand,
    startCommand,
    baseDirectory,
    publishDirectory
  };
  return [
    "coolify.managed=true",
    `coolify.version=${import_common.version}`,
    `coolify.applicationId=${applicationId}`,
    `coolify.type=standalone-application`,
    `coolify.name=${name}`,
    `coolify.configuration=${(0, import_common.base64Encode)(JSON.stringify(configuration))}`
  ];
}
|
||||
/**
 * Generates a Dockerfile-cache that runs the node install/build steps on the
 * given build image, then builds it as the application's cache image.
 */
async function buildCacheImageWithNode(data, imageForBuild) {
  const {
    workdir,
    buildId,
    baseDirectory,
    installCommand,
    buildCommand,
    secrets,
    pullmergeRequestId
  } = data;
  const lines = [
    `FROM ${imageForBuild}`,
    "WORKDIR /app",
    `LABEL coolify.buildId=${buildId}`
  ];
  // Build-time secrets are injected as ENV/ARG lines.
  if (secrets.length > 0) {
    lines.push(...(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true));
  }
  if (checkPnpm(installCommand, buildCommand)) {
    lines.push("RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7");
  }
  lines.push(`COPY .${baseDirectory || ""} ./`);
  if (installCommand) {
    lines.push(`RUN ${installCommand}`);
  }
  lines.push(`RUN ${buildCommand}`);
  await import_fs.promises.writeFile(`${workdir}/Dockerfile-cache`, lines.join("\n"));
  await buildImage({ ...data, isCache: true });
}
|
||||
/**
 * Generates a Dockerfile-cache for a Laravel frontend build (yarn + mix)
 * and builds it as the application's cache image.
 */
async function buildCacheImageForLaravel(data, imageForBuild) {
  const { workdir, buildId, secrets, pullmergeRequestId } = data;
  const lines = [
    `FROM ${imageForBuild}`,
    "WORKDIR /app",
    `LABEL coolify.buildId=${buildId}`
  ];
  // Build-time secrets are injected as ENV/ARG lines.
  if (secrets.length > 0) {
    lines.push(...(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true));
  }
  lines.push(`COPY *.json *.mix.js /app/`);
  lines.push(`COPY resources /app/resources`);
  lines.push(`RUN yarn install && yarn production`);
  await import_fs.promises.writeFile(`${workdir}/Dockerfile-cache`, lines.join("\n"));
  await buildImage({ ...data, isCache: true });
}
|
||||
/**
 * Generates a two-stage cargo-chef Dockerfile-cache (plan dependencies, then
 * cook them into a cached layer) and builds it as the app's cache image.
 */
async function buildCacheImageWithCargo(data, imageForBuild) {
  const { applicationId, workdir, buildId } = data;
  const plannerStage = [
    `FROM ${imageForBuild} as planner-${applicationId}`,
    `LABEL coolify.buildId=${buildId}`,
    "WORKDIR /app",
    "RUN cargo install cargo-chef",
    "COPY . .",
    "RUN cargo chef prepare --recipe-path recipe.json"
  ];
  const cookStage = [
    `FROM ${imageForBuild}`,
    `LABEL coolify.buildId=${buildId}`,
    "WORKDIR /app",
    "RUN cargo install cargo-chef",
    `COPY --from=planner-${applicationId} /app/recipe.json recipe.json`,
    "RUN cargo chef cook --release --recipe-path recipe.json"
  ];
  const dockerfile = [...plannerStage, ...cookStage].join("\n");
  await import_fs.promises.writeFile(`${workdir}/Dockerfile-cache`, dockerfile);
  await buildImage({ ...data, isCache: true });
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design: `0 &&` prevents execution; esbuild emits this so
// Node's cjs-module-lexer can statically detect the named exports)
0 && (module.exports = {
  buildCacheImageForLaravel,
  buildCacheImageWithCargo,
  buildCacheImageWithNode,
  buildImage,
  checkPnpm,
  copyBaseConfigurationFiles,
  makeLabelForSimpleDockerfile,
  makeLabelForStandaloneApplication,
  saveBuildLog,
  saveDockerRegistryCredentials,
  scanningTemplates,
  setDefaultBaseImage,
  setDefaultConfiguration
});
|
||||
137
apps/trpc-experimental/server/build/lib/buildPacks/compose.js
Normal file
137
apps/trpc-experimental/server/build/lib/buildPacks/compose.js
Normal file
@@ -0,0 +1,137 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var compose_exports = {};
|
||||
__export(compose_exports, {
|
||||
default: () => compose_default
|
||||
});
|
||||
module.exports = __toCommonJS(compose_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
var import_js_yaml = __toESM(require("js-yaml"));
|
||||
var import_docker = require("../docker");
|
||||
var import_executeCommand = require("../executeCommand");
|
||||
/**
 * Docker-Compose build pack: rewrites the application's compose file
 * (container names, secrets as environment, labels, namespaced volumes,
 * exposed ports, networks, restart/deploy defaults), writes it back, then
 * pulls and builds the images. Throws when the file defines no services.
 */
async function compose_default(data) {
  let {
    applicationId,
    debug,
    buildId,
    dockerId,
    network,
    volumes,
    labels,
    workdir,
    baseDirectory,
    secrets,
    pullmergeRequestId,
    dockerComposeConfiguration,
    dockerComposeFileLocation
  } = data;
  const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
  const dockerComposeRaw = await import_fs.promises.readFile(fileYaml, "utf8");
  const dockerComposeYaml = import_js_yaml.default.load(dockerComposeRaw);
  if (!dockerComposeYaml.services) {
    throw "No Services found in docker-compose file.";
  }
  let envs = [];
  if (secrets.length > 0) {
    envs = [...envs, ...(0, import_common.generateSecrets)(secrets, pullmergeRequestId, false, null)];
  }
  // Used as a string-keyed map; collects named volumes for the top-level section.
  const composeVolumes = [];
  if (volumes.length > 0) {
    for (const volume of volumes) {
      const [v] = volume.split(":");
      composeVolumes[v] = {
        name: v
      };
    }
  }
  let networks = {};
  for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
    value["container_name"] = `${applicationId}-${key}`;
    const environment = typeof value["environment"] === "undefined" ? [] : value["environment"];
    value["environment"] = [...environment, ...envs];
    value["labels"] = labels;
    if (value["volumes"]?.length > 0) {
      // Namespace each service volume with the application id (slashes/dots stripped).
      value["volumes"] = value["volumes"].map((volume) => {
        let [v, path, permission] = volume.split(":");
        if (!path) {
          path = v;
        }
        v = `${applicationId}${v.replace(/\//gi, "-").replace(/\./gi, "")}`;
        composeVolumes[v] = {
          name: v
        };
        return `${v}:${path}${permission ? ":" + permission : ""}`;
      });
    }
    if (volumes.length > 0) {
      // BUGFIX: a service without a `volumes` key previously crashed here with
      // "Cannot read properties of undefined (reading 'push')".
      value["volumes"] = value["volumes"] || [];
      for (const volume of volumes) {
        value["volumes"].push(volume);
      }
    }
    // BUGFIX: tolerate services missing from the per-service configuration map
    // instead of throwing on `dockerComposeConfiguration[key].port`.
    if (dockerComposeConfiguration?.[key]?.port) {
      value["expose"] = [dockerComposeConfiguration[key].port];
    }
    if (value["networks"]?.length > 0) {
      value["networks"].forEach((network2) => {
        networks[network2] = {
          name: network2
        };
      });
    }
    // `[...""]` spreads to nothing, so a missing networks key yields [network].
    value["networks"] = [...value["networks"] || "", network];
    const composeDefaults = (0, import_docker.defaultComposeConfiguration)(network);
    dockerComposeYaml.services[key] = {
      ...dockerComposeYaml.services[key],
      restart: composeDefaults.restart,
      deploy: composeDefaults.deploy
    };
  }
  if (Object.keys(composeVolumes).length > 0) {
    dockerComposeYaml["volumes"] = { ...composeVolumes };
  }
  dockerComposeYaml["networks"] = Object.assign({ ...networks }, { [network]: { external: true } });
  await import_fs.promises.writeFile(fileYaml, import_js_yaml.default.dump(dockerComposeYaml));
  await (0, import_executeCommand.executeCommand)({
    debug,
    buildId,
    applicationId,
    dockerId,
    command: `docker compose --project-directory ${workdir} pull`
  });
  await (0, import_common2.saveBuildLog)({ line: "Pulling images from Compose file...", buildId, applicationId });
  await (0, import_executeCommand.executeCommand)({
    debug,
    buildId,
    applicationId,
    dockerId,
    command: `docker compose --project-directory ${workdir} build --progress plain`
  });
  await (0, import_common2.saveBuildLog)({ line: "Building images from Compose file...", buildId, applicationId });
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
74
apps/trpc-experimental/server/build/lib/buildPacks/deno.js
Normal file
74
apps/trpc-experimental/server/build/lib/buildPacks/deno.js
Normal file
@@ -0,0 +1,74 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var deno_exports = {};
|
||||
__export(deno_exports, {
|
||||
default: () => deno_default
|
||||
});
|
||||
module.exports = __toCommonJS(deno_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const {
|
||||
workdir,
|
||||
port,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
denoMainFile,
|
||||
denoOptions,
|
||||
buildId
|
||||
} = data;
|
||||
const Dockerfile = [];
|
||||
let depsFound = false;
|
||||
try {
|
||||
await import_fs.promises.readFile(`${workdir}${baseDirectory || ""}/deps.ts`);
|
||||
depsFound = true;
|
||||
} catch (error) {
|
||||
}
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (depsFound) {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""}/deps.ts /app`);
|
||||
Dockerfile.push(`RUN deno cache deps.ts`);
|
||||
}
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""} ./`);
|
||||
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
|
||||
Dockerfile.push(`ENV NO_COLOR true`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD deno run ${denoOptions || ""} ${denoMainFile}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function deno_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
51
apps/trpc-experimental/server/build/lib/buildPacks/docker.js
Normal file
51
apps/trpc-experimental/server/build/lib/buildPacks/docker.js
Normal file
@@ -0,0 +1,51 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var docker_exports = {};
|
||||
__export(docker_exports, {
|
||||
default: () => docker_default
|
||||
});
|
||||
module.exports = __toCommonJS(docker_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
async function docker_default(data) {
|
||||
let { workdir, buildId, baseDirectory, secrets, pullmergeRequestId, dockerFileLocation } = data;
|
||||
const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
|
||||
data.workdir = `${workdir}${baseDirectory}`;
|
||||
const DockerfileRaw = await import_fs.promises.readFile(`${file}`, "utf8");
|
||||
const Dockerfile = DockerfileRaw.toString().trim().split("\n");
|
||||
Dockerfile.forEach((line, index) => {
|
||||
if (line.startsWith("FROM")) {
|
||||
Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
|
||||
}
|
||||
});
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.forEach((line, index) => {
|
||||
if (line.startsWith("FROM")) {
|
||||
Dockerfile.splice(index + 1, 0, env);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
await import_fs.promises.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join("\n"));
|
||||
await (0, import_common2.buildImage)(data);
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
50
apps/trpc-experimental/server/build/lib/buildPacks/gatsby.js
Normal file
50
apps/trpc-experimental/server/build/lib/buildPacks/gatsby.js
Normal file
@@ -0,0 +1,50 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var gatsby_exports = {};
|
||||
__export(gatsby_exports, {
|
||||
default: () => gatsby_default
|
||||
});
|
||||
module.exports = __toCommonJS(gatsby_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("./common");
|
||||
const createDockerfile = async (data, imageforBuild) => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${imageforBuild}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes("nginx")) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function gatsby_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await (0, import_common.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
41
apps/trpc-experimental/server/build/lib/buildPacks/heroku.js
Normal file
41
apps/trpc-experimental/server/build/lib/buildPacks/heroku.js
Normal file
@@ -0,0 +1,41 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var heroku_exports = {};
|
||||
__export(heroku_exports, {
|
||||
default: () => heroku_default
|
||||
});
|
||||
module.exports = __toCommonJS(heroku_exports);
|
||||
var import_executeCommand = require("../executeCommand");
|
||||
var import_common = require("./common");
|
||||
async function heroku_default(data) {
|
||||
const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data;
|
||||
try {
|
||||
await (0, import_common.saveBuildLog)({ line: `Building production image...`, buildId, applicationId });
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
buildId,
|
||||
debug,
|
||||
dockerId,
|
||||
command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
|
||||
});
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
88
apps/trpc-experimental/server/build/lib/buildPacks/index.js
Normal file
88
apps/trpc-experimental/server/build/lib/buildPacks/index.js
Normal file
@@ -0,0 +1,88 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var buildPacks_exports = {};
|
||||
__export(buildPacks_exports, {
|
||||
astro: () => import_static2.default,
|
||||
compose: () => import_compose.default,
|
||||
deno: () => import_deno.default,
|
||||
docker: () => import_docker.default,
|
||||
eleventy: () => import_static3.default,
|
||||
gatsby: () => import_gatsby.default,
|
||||
heroku: () => import_heroku.default,
|
||||
laravel: () => import_laravel.default,
|
||||
nestjs: () => import_nestjs.default,
|
||||
nextjs: () => import_nextjs.default,
|
||||
node: () => import_node.default,
|
||||
nuxtjs: () => import_nuxtjs.default,
|
||||
php: () => import_php.default,
|
||||
python: () => import_python.default,
|
||||
react: () => import_react.default,
|
||||
rust: () => import_rust.default,
|
||||
staticApp: () => import_static.default,
|
||||
svelte: () => import_svelte.default,
|
||||
vuejs: () => import_vuejs.default
|
||||
});
|
||||
module.exports = __toCommonJS(buildPacks_exports);
|
||||
var import_node = __toESM(require("./node"));
|
||||
var import_static = __toESM(require("./static"));
|
||||
var import_docker = __toESM(require("./docker"));
|
||||
var import_gatsby = __toESM(require("./gatsby"));
|
||||
var import_svelte = __toESM(require("./svelte"));
|
||||
var import_react = __toESM(require("./react"));
|
||||
var import_nestjs = __toESM(require("./nestjs"));
|
||||
var import_nextjs = __toESM(require("./nextjs"));
|
||||
var import_nuxtjs = __toESM(require("./nuxtjs"));
|
||||
var import_vuejs = __toESM(require("./vuejs"));
|
||||
var import_php = __toESM(require("./php"));
|
||||
var import_rust = __toESM(require("./rust"));
|
||||
var import_static2 = __toESM(require("./static"));
|
||||
var import_static3 = __toESM(require("./static"));
|
||||
var import_python = __toESM(require("./python"));
|
||||
var import_deno = __toESM(require("./deno"));
|
||||
var import_laravel = __toESM(require("./laravel"));
|
||||
var import_heroku = __toESM(require("./heroku"));
|
||||
var import_compose = __toESM(require("./compose"));
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
astro,
|
||||
compose,
|
||||
deno,
|
||||
docker,
|
||||
eleventy,
|
||||
gatsby,
|
||||
heroku,
|
||||
laravel,
|
||||
nestjs,
|
||||
nextjs,
|
||||
node,
|
||||
nuxtjs,
|
||||
php,
|
||||
python,
|
||||
react,
|
||||
rust,
|
||||
staticApp,
|
||||
svelte,
|
||||
vuejs
|
||||
});
|
||||
@@ -0,0 +1,68 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var laravel_exports = {};
|
||||
__export(laravel_exports, {
|
||||
default: () => laravel_default
|
||||
});
|
||||
module.exports = __toCommonJS(laravel_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const { workdir, applicationId, tag, buildId, port, secrets, pullmergeRequestId } = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
|
||||
Dockerfile.push(`COPY --chown=application:application composer.* ./`);
|
||||
Dockerfile.push(`COPY --chown=application:application database/ database/`);
|
||||
Dockerfile.push(
|
||||
`RUN composer install --ignore-platform-reqs --no-interaction --no-plugins --no-scripts --prefer-dist`
|
||||
);
|
||||
Dockerfile.push(
|
||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/js/ /app/public/js/`
|
||||
);
|
||||
Dockerfile.push(
|
||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/css/ /app/public/css/`
|
||||
);
|
||||
Dockerfile.push(
|
||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
|
||||
);
|
||||
Dockerfile.push(`COPY --chown=application:application . ./`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function laravel_default(data) {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
try {
|
||||
await (0, import_common2.buildCacheImageForLaravel)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
52
apps/trpc-experimental/server/build/lib/buildPacks/nestjs.js
Normal file
52
apps/trpc-experimental/server/build/lib/buildPacks/nestjs.js
Normal file
@@ -0,0 +1,52 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var nestjs_exports = {};
|
||||
__export(nestjs_exports, {
|
||||
default: () => nestjs_default
|
||||
});
|
||||
module.exports = __toCommonJS(nestjs_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
|
||||
const Dockerfile = [];
|
||||
const isPnpm = startCommand.includes("pnpm");
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (isPnpm) {
|
||||
Dockerfile.push("RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7");
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ""} ./`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function nestjs_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await (0, import_common.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
89
apps/trpc-experimental/server/build/lib/buildPacks/nextjs.js
Normal file
89
apps/trpc-experimental/server/build/lib/buildPacks/nextjs.js
Normal file
@@ -0,0 +1,89 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var nextjs_exports = {};
|
||||
__export(nextjs_exports, {
|
||||
default: () => nextjs_default
|
||||
});
|
||||
module.exports = __toCommonJS(nextjs_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const {
|
||||
applicationId,
|
||||
buildId,
|
||||
tag,
|
||||
workdir,
|
||||
publishDirectory,
|
||||
port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
deploymentType,
|
||||
baseImage
|
||||
} = data;
|
||||
const Dockerfile = [];
|
||||
const isPnpm = (0, import_common2.checkPnpm)(installCommand, buildCommand, startCommand);
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (isPnpm) {
|
||||
Dockerfile.push("RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7");
|
||||
}
|
||||
if (deploymentType === "node") {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""} ./`);
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
} else if (deploymentType === "static") {
|
||||
if (baseImage?.includes("nginx")) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
Dockerfile.push(`EXPOSE 80`);
|
||||
}
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function nextjs_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
|
||||
if (deploymentType === "node") {
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} else if (deploymentType === "static") {
|
||||
if (buildCommand)
|
||||
await (0, import_common2.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
71
apps/trpc-experimental/server/build/lib/buildPacks/node.js
Normal file
71
apps/trpc-experimental/server/build/lib/buildPacks/node.js
Normal file
@@ -0,0 +1,71 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var node_exports = {};
|
||||
__export(node_exports, {
|
||||
default: () => node_default
|
||||
});
|
||||
module.exports = __toCommonJS(node_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const {
|
||||
workdir,
|
||||
port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
buildId
|
||||
} = data;
|
||||
const Dockerfile = [];
|
||||
const isPnpm = (0, import_common2.checkPnpm)(installCommand, buildCommand, startCommand);
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (isPnpm) {
|
||||
Dockerfile.push("RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7");
|
||||
}
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""} ./`);
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
if (buildCommand) {
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function node_default(data) {
|
||||
try {
|
||||
const { baseImage } = data;
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
89
apps/trpc-experimental/server/build/lib/buildPacks/nuxtjs.js
Normal file
89
apps/trpc-experimental/server/build/lib/buildPacks/nuxtjs.js
Normal file
@@ -0,0 +1,89 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var nuxtjs_exports = {};
|
||||
__export(nuxtjs_exports, {
|
||||
default: () => nuxtjs_default
|
||||
});
|
||||
module.exports = __toCommonJS(nuxtjs_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const {
|
||||
applicationId,
|
||||
buildId,
|
||||
tag,
|
||||
workdir,
|
||||
publishDirectory,
|
||||
port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
deploymentType,
|
||||
baseImage
|
||||
} = data;
|
||||
const Dockerfile = [];
|
||||
const isPnpm = (0, import_common2.checkPnpm)(installCommand, buildCommand, startCommand);
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (isPnpm) {
|
||||
Dockerfile.push("RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7");
|
||||
}
|
||||
if (deploymentType === "node") {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""} ./`);
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
} else if (deploymentType === "static") {
|
||||
if (baseImage?.includes("nginx")) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
Dockerfile.push(`EXPOSE 80`);
|
||||
}
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function nuxtjs_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
|
||||
if (deploymentType === "node") {
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} else if (deploymentType === "static") {
|
||||
if (buildCommand)
|
||||
await (0, import_common2.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
71
apps/trpc-experimental/server/build/lib/buildPacks/php.js
Normal file
71
apps/trpc-experimental/server/build/lib/buildPacks/php.js
Normal file
@@ -0,0 +1,71 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var php_exports = {};
|
||||
__export(php_exports, {
|
||||
default: () => php_default
|
||||
});
|
||||
module.exports = __toCommonJS(php_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image, htaccessFound) => {
|
||||
const { workdir, baseDirectory, buildId, port, secrets, pullmergeRequestId } = data;
|
||||
const Dockerfile = [];
|
||||
let composerFound = false;
|
||||
try {
|
||||
await import_fs.promises.readFile(`${workdir}${baseDirectory || ""}/composer.json`);
|
||||
composerFound = true;
|
||||
} catch (error) {
|
||||
}
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""} /app`);
|
||||
if (htaccessFound) {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""}/.htaccess ./`);
|
||||
}
|
||||
if (composerFound) {
|
||||
Dockerfile.push(`RUN composer install`);
|
||||
}
|
||||
Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function php_default(data) {
|
||||
const { workdir, baseDirectory, baseImage } = data;
|
||||
try {
|
||||
let htaccessFound = false;
|
||||
try {
|
||||
await import_fs.promises.readFile(`${workdir}${baseDirectory || ""}/.htaccess`);
|
||||
htaccessFound = true;
|
||||
} catch (e) {
|
||||
}
|
||||
await createDockerfile(data, baseImage, htaccessFound);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
86
apps/trpc-experimental/server/build/lib/buildPacks/python.js
Normal file
86
apps/trpc-experimental/server/build/lib/buildPacks/python.js
Normal file
@@ -0,0 +1,86 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var python_exports = {};
|
||||
__export(python_exports, {
|
||||
default: () => python_default
|
||||
});
|
||||
module.exports = __toCommonJS(python_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const {
|
||||
workdir,
|
||||
port,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
buildId
|
||||
} = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (pythonWSGI?.toLowerCase() === "gunicorn") {
|
||||
Dockerfile.push(`RUN pip install gunicorn`);
|
||||
} else if (pythonWSGI?.toLowerCase() === "uvicorn") {
|
||||
Dockerfile.push(`RUN pip install uvicorn`);
|
||||
} else if (pythonWSGI?.toLowerCase() === "uwsgi") {
|
||||
Dockerfile.push(`RUN apk add --no-cache uwsgi-python3`);
|
||||
}
|
||||
try {
|
||||
await import_fs.promises.stat(`${workdir}${baseDirectory || ""}/requirements.txt`);
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""}/requirements.txt ./`);
|
||||
Dockerfile.push(`RUN pip install --no-cache-dir -r .${baseDirectory || ""}/requirements.txt`);
|
||||
} catch (e) {
|
||||
}
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""} ./`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
if (pythonWSGI?.toLowerCase() === "gunicorn") {
|
||||
Dockerfile.push(`CMD gunicorn -w=4 -b=0.0.0.0:8000 ${pythonModule}:${pythonVariable}`);
|
||||
} else if (pythonWSGI?.toLowerCase() === "uvicorn") {
|
||||
Dockerfile.push(`CMD uvicorn ${pythonModule}:${pythonVariable} --port ${port} --host 0.0.0.0`);
|
||||
} else if (pythonWSGI?.toLowerCase() === "uwsgi") {
|
||||
Dockerfile.push(
|
||||
`CMD uwsgi --master -p 4 --http-socket 0.0.0.0:8000 --uid uwsgi --plugins python3 --protocol uwsgi --wsgi ${pythonModule}:${pythonVariable}`
|
||||
);
|
||||
} else {
|
||||
Dockerfile.push(`CMD python ${pythonModule}`);
|
||||
}
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function python_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
50
apps/trpc-experimental/server/build/lib/buildPacks/react.js
vendored
Normal file
50
apps/trpc-experimental/server/build/lib/buildPacks/react.js
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var react_exports = {};
|
||||
__export(react_exports, {
|
||||
default: () => react_default
|
||||
});
|
||||
module.exports = __toCommonJS(react_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes("nginx")) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function react_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await (0, import_common.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
69
apps/trpc-experimental/server/build/lib/buildPacks/rust.js
Normal file
69
apps/trpc-experimental/server/build/lib/buildPacks/rust.js
Normal file
@@ -0,0 +1,69 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var rust_exports = {};
|
||||
__export(rust_exports, {
|
||||
default: () => rust_default
|
||||
});
|
||||
module.exports = __toCommonJS(rust_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_toml = __toESM(require("@iarna/toml"));
|
||||
var import_common = require("./common");
|
||||
var import_executeCommand = require("../executeCommand");
|
||||
const createDockerfile = async (data, image, name) => {
|
||||
const { workdir, port, applicationId, tag, buildId } = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target target`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /usr/local/cargo /usr/local/cargo`);
|
||||
Dockerfile.push(`COPY . .`);
|
||||
Dockerfile.push(`RUN cargo build --release --bin ${name}`);
|
||||
Dockerfile.push("FROM debian:buster-slim");
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(
|
||||
`RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*`
|
||||
);
|
||||
Dockerfile.push(`RUN update-ca-certificates`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target/release/${name} ${name}`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ["/app/${name}"]`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function rust_default(data) {
|
||||
try {
|
||||
const { workdir, baseImage, baseBuildImage } = data;
|
||||
const { stdout: cargoToml } = await (0, import_executeCommand.executeCommand)({ command: `cat ${workdir}/Cargo.toml` });
|
||||
const parsedToml = import_toml.default.parse(cargoToml);
|
||||
const name = parsedToml.package.name;
|
||||
await (0, import_common.buildCacheImageWithCargo)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage, name);
|
||||
await (0, import_common.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
77
apps/trpc-experimental/server/build/lib/buildPacks/static.js
Normal file
77
apps/trpc-experimental/server/build/lib/buildPacks/static.js
Normal file
@@ -0,0 +1,77 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var static_exports = {};
|
||||
__export(static_exports, {
|
||||
default: () => static_default
|
||||
});
|
||||
module.exports = __toCommonJS(static_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("../common");
|
||||
var import_common2 = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const {
|
||||
applicationId,
|
||||
tag,
|
||||
workdir,
|
||||
buildCommand,
|
||||
baseDirectory,
|
||||
publishDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
baseImage,
|
||||
buildId,
|
||||
port
|
||||
} = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
if (baseImage?.includes("httpd")) {
|
||||
Dockerfile.push("WORKDIR /usr/local/apache2/htdocs/");
|
||||
} else {
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
}
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
(0, import_common.generateSecrets)(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (buildCommand) {
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
} else {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ""} ./`);
|
||||
}
|
||||
if (baseImage?.includes("nginx")) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function static_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
if (data.buildCommand)
|
||||
await (0, import_common2.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common2.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
50
apps/trpc-experimental/server/build/lib/buildPacks/svelte.js
Normal file
50
apps/trpc-experimental/server/build/lib/buildPacks/svelte.js
Normal file
@@ -0,0 +1,50 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var svelte_exports = {};
|
||||
__export(svelte_exports, {
|
||||
default: () => svelte_default
|
||||
});
|
||||
module.exports = __toCommonJS(svelte_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes("nginx")) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function svelte_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await (0, import_common.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
50
apps/trpc-experimental/server/build/lib/buildPacks/vuejs.js
Normal file
50
apps/trpc-experimental/server/build/lib/buildPacks/vuejs.js
Normal file
@@ -0,0 +1,50 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var vuejs_exports = {};
|
||||
__export(vuejs_exports, {
|
||||
default: () => vuejs_default
|
||||
});
|
||||
module.exports = __toCommonJS(vuejs_exports);
|
||||
var import_fs = require("fs");
|
||||
var import_common = require("./common");
|
||||
const createDockerfile = async (data, image) => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push("WORKDIR /app");
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes("nginx")) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await import_fs.promises.writeFile(`${workdir}/Dockerfile`, Dockerfile.join("\n"));
|
||||
};
|
||||
async function vuejs_default(data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await (0, import_common.buildCacheImageWithNode)(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await (0, import_common.buildImage)(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
952
apps/trpc-experimental/server/build/lib/common.js
Normal file
952
apps/trpc-experimental/server/build/lib/common.js
Normal file
@@ -0,0 +1,952 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var common_exports = {};
|
||||
__export(common_exports, {
|
||||
asyncSleep: () => asyncSleep,
|
||||
base64Decode: () => base64Decode,
|
||||
base64Encode: () => base64Encode,
|
||||
checkDomainsIsValidInDNS: () => checkDomainsIsValidInDNS,
|
||||
checkExposedPort: () => checkExposedPort,
|
||||
cleanupDB: () => cleanupDB,
|
||||
comparePassword: () => comparePassword,
|
||||
configureNetworkTraefikProxy: () => configureNetworkTraefikProxy,
|
||||
createDirectories: () => createDirectories,
|
||||
decrypt: () => decrypt,
|
||||
decryptApplication: () => decryptApplication,
|
||||
defaultTraefikImage: () => defaultTraefikImage,
|
||||
encrypt: () => encrypt,
|
||||
fixType: () => fixType,
|
||||
generateRangeArray: () => generateRangeArray,
|
||||
generateSecrets: () => generateSecrets,
|
||||
generateTimestamp: () => generateTimestamp,
|
||||
getAPIUrl: () => getAPIUrl,
|
||||
getContainerUsage: () => getContainerUsage,
|
||||
getCurrentUser: () => getCurrentUser,
|
||||
getDomain: () => getDomain,
|
||||
getFreeExposedPort: () => getFreeExposedPort,
|
||||
getTags: () => getTags,
|
||||
getTeamInvitation: () => getTeamInvitation,
|
||||
getTemplates: () => getTemplates,
|
||||
getUIUrl: () => getUIUrl,
|
||||
hashPassword: () => hashPassword,
|
||||
isARM: () => isARM,
|
||||
isDev: () => isDev,
|
||||
isDomainConfigured: () => isDomainConfigured,
|
||||
listSettings: () => listSettings,
|
||||
makeLabelForServices: () => makeLabelForServices,
|
||||
pushToRegistry: () => pushToRegistry,
|
||||
removeService: () => removeService,
|
||||
saveDockerRegistryCredentials: () => saveDockerRegistryCredentials,
|
||||
scanningTemplates: () => scanningTemplates,
|
||||
sentryDSN: () => sentryDSN,
|
||||
setDefaultConfiguration: () => setDefaultConfiguration,
|
||||
startTraefikProxy: () => startTraefikProxy,
|
||||
startTraefikTCPProxy: () => startTraefikTCPProxy,
|
||||
stopTraefikProxy: () => stopTraefikProxy,
|
||||
uniqueName: () => uniqueName,
|
||||
version: () => version
|
||||
});
|
||||
module.exports = __toCommonJS(common_exports);
|
||||
var import_prisma = require("../prisma");
|
||||
var import_bcryptjs = __toESM(require("bcryptjs"));
|
||||
var import_crypto = __toESM(require("crypto"));
|
||||
var import_dns = require("dns");
|
||||
var import_promises = __toESM(require("fs/promises"));
|
||||
var import_unique_names_generator = require("unique-names-generator");
|
||||
var import_env = require("../env");
|
||||
var import_dayjs = require("./dayjs");
|
||||
var import_executeCommand = require("./executeCommand");
|
||||
var import_logging = require("./logging");
|
||||
var import_docker = require("./docker");
|
||||
var import_js_yaml = __toESM(require("js-yaml"));
|
||||
const customConfig = {
|
||||
dictionaries: [import_unique_names_generator.adjectives, import_unique_names_generator.colors, import_unique_names_generator.animals],
|
||||
style: "capital",
|
||||
separator: " ",
|
||||
length: 3
|
||||
};
|
||||
const algorithm = "aes-256-ctr";
|
||||
const isDev = import_env.env.NODE_ENV === "development";
|
||||
const version = "3.13.0";
|
||||
const sentryDSN = "https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216";
|
||||
const defaultTraefikImage = `traefik:v2.8`;
|
||||
function getAPIUrl() {
|
||||
if (process.env.GITPOD_WORKSPACE_URL) {
|
||||
const { href } = new URL(process.env.GITPOD_WORKSPACE_URL);
|
||||
const newURL = href.replace("https://", "https://3001-").replace(/\/$/, "");
|
||||
return newURL;
|
||||
}
|
||||
if (process.env.CODESANDBOX_HOST) {
|
||||
return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, "3001")}`;
|
||||
}
|
||||
return isDev ? "http://host.docker.internal:3001" : "http://localhost:3000";
|
||||
}
|
||||
function getUIUrl() {
|
||||
if (process.env.GITPOD_WORKSPACE_URL) {
|
||||
const { href } = new URL(process.env.GITPOD_WORKSPACE_URL);
|
||||
const newURL = href.replace("https://", "https://3000-").replace(/\/$/, "");
|
||||
return newURL;
|
||||
}
|
||||
if (process.env.CODESANDBOX_HOST) {
|
||||
return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, "3000")}`;
|
||||
}
|
||||
return "http://localhost:3000";
|
||||
}
|
||||
const mainTraefikEndpoint = isDev ? `${getAPIUrl()}/webhooks/traefik/main.json` : "http://coolify:3000/webhooks/traefik/main.json";
|
||||
const otherTraefikEndpoint = isDev ? `${getAPIUrl()}/webhooks/traefik/other.json` : "http://coolify:3000/webhooks/traefik/other.json";
|
||||
async function listSettings() {
|
||||
return await import_prisma.prisma.setting.findUnique({ where: { id: "0" } });
|
||||
}
|
||||
async function getCurrentUser(userId) {
|
||||
return await import_prisma.prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
include: { teams: true, permission: true }
|
||||
});
|
||||
}
|
||||
async function getTeamInvitation(userId) {
|
||||
return await import_prisma.prisma.teamInvitation.findMany({ where: { uid: userId } });
|
||||
}
|
||||
async function hashPassword(password) {
|
||||
const saltRounds = 15;
|
||||
return import_bcryptjs.default.hash(password, saltRounds);
|
||||
}
|
||||
async function comparePassword(password, hashedPassword) {
|
||||
return import_bcryptjs.default.compare(password, hashedPassword);
|
||||
}
|
||||
const uniqueName = () => (0, import_unique_names_generator.uniqueNamesGenerator)(customConfig);
|
||||
const decrypt = (hashString) => {
|
||||
if (hashString) {
|
||||
try {
|
||||
const hash = JSON.parse(hashString);
|
||||
const decipher = import_crypto.default.createDecipheriv(
|
||||
algorithm,
|
||||
import_env.env.COOLIFY_SECRET_KEY,
|
||||
Buffer.from(hash.iv, "hex")
|
||||
);
|
||||
const decrpyted = Buffer.concat([
|
||||
decipher.update(Buffer.from(hash.content, "hex")),
|
||||
decipher.final()
|
||||
]);
|
||||
return decrpyted.toString();
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
console.log({ decryptionError: error.message });
|
||||
}
|
||||
return hashString;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
function generateRangeArray(start, end) {
|
||||
return Array.from({ length: end - start }, (_v, k) => k + start);
|
||||
}
|
||||
function generateTimestamp() {
|
||||
return `${(0, import_dayjs.day)().format("HH:mm:ss.SSS")}`;
|
||||
}
|
||||
const encrypt = (text) => {
|
||||
if (text) {
|
||||
const iv = import_crypto.default.randomBytes(16);
|
||||
const cipher = import_crypto.default.createCipheriv(algorithm, import_env.env.COOLIFY_SECRET_KEY, iv);
|
||||
const encrypted = Buffer.concat([cipher.update(text.trim()), cipher.final()]);
|
||||
return JSON.stringify({
|
||||
iv: iv.toString("hex"),
|
||||
content: encrypted.toString("hex")
|
||||
});
|
||||
}
|
||||
return false;
|
||||
};
|
||||
async function getTemplates() {
|
||||
const templatePath = isDev ? "./templates.json" : "/app/templates.json";
|
||||
const open = await import_promises.default.open(templatePath, "r");
|
||||
try {
|
||||
let data = await open.readFile({ encoding: "utf-8" });
|
||||
let jsonData = JSON.parse(data);
|
||||
if (isARM(process.arch)) {
|
||||
jsonData = jsonData.filter((d) => d.arch !== "amd64");
|
||||
}
|
||||
return jsonData;
|
||||
} catch (error) {
|
||||
return [];
|
||||
} finally {
|
||||
await open?.close();
|
||||
}
|
||||
}
|
||||
function isARM(arch) {
|
||||
if (arch === "arm" || arch === "arm64" || arch === "aarch" || arch === "aarch64") {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
async function removeService({ id }) {
|
||||
await import_prisma.prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.serviceSetting.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.fider.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.ghost.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.umami.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.hasura.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.minio.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.vscodeserver.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.wordpress.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.glitchTip.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.moodle.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.appwrite.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.searxng.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.weblate.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.taiga.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.service.delete({ where: { id } });
|
||||
}
|
||||
const createDirectories = async ({
|
||||
repository,
|
||||
buildId
|
||||
}) => {
|
||||
if (repository)
|
||||
repository = repository.replaceAll(" ", "");
|
||||
const repodir = `/tmp/build-sources/${repository}/`;
|
||||
const workdir = `/tmp/build-sources/${repository}/${buildId}`;
|
||||
let workdirFound = false;
|
||||
try {
|
||||
workdirFound = !!await import_promises.default.stat(workdir);
|
||||
} catch (error) {
|
||||
}
|
||||
if (workdirFound) {
|
||||
await (0, import_executeCommand.executeCommand)({ command: `rm -fr ${workdir}` });
|
||||
}
|
||||
await (0, import_executeCommand.executeCommand)({ command: `mkdir -p ${workdir}` });
|
||||
return {
|
||||
workdir,
|
||||
repodir
|
||||
};
|
||||
};
|
||||
async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
|
||||
if (!username || !password) {
|
||||
return null;
|
||||
}
|
||||
let decryptedPassword = decrypt(password);
|
||||
const location = `${workdir}/.docker`;
|
||||
try {
|
||||
await import_promises.default.mkdir(`${workdir}/.docker`);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
const payload = JSON.stringify({
|
||||
auths: {
|
||||
[url]: {
|
||||
auth: Buffer.from(`${username}:${decryptedPassword}`).toString("base64")
|
||||
}
|
||||
}
|
||||
});
|
||||
await import_promises.default.writeFile(`${location}/config.json`, payload);
|
||||
return location;
|
||||
}
|
||||
function getDomain(domain) {
|
||||
if (domain) {
|
||||
return domain?.replace("https://", "").replace("http://", "");
|
||||
} else {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
async function isDomainConfigured({
|
||||
id,
|
||||
fqdn,
|
||||
checkOwn = false,
|
||||
remoteIpAddress = void 0
|
||||
}) {
|
||||
const domain = getDomain(fqdn);
|
||||
const nakedDomain = domain.replace("www.", "");
|
||||
const foundApp = await import_prisma.prisma.application.findFirst({
|
||||
where: {
|
||||
OR: [
|
||||
{ fqdn: { endsWith: `//${nakedDomain}` } },
|
||||
{ fqdn: { endsWith: `//www.${nakedDomain}` } },
|
||||
{ dockerComposeConfiguration: { contains: `//${nakedDomain}` } },
|
||||
{ dockerComposeConfiguration: { contains: `//www.${nakedDomain}` } }
|
||||
],
|
||||
id: { not: id },
|
||||
destinationDocker: {
|
||||
remoteIpAddress
|
||||
}
|
||||
},
|
||||
select: { fqdn: true }
|
||||
});
|
||||
const foundService = await import_prisma.prisma.service.findFirst({
|
||||
where: {
|
||||
OR: [
|
||||
{ fqdn: { endsWith: `//${nakedDomain}` } },
|
||||
{ fqdn: { endsWith: `//www.${nakedDomain}` } }
|
||||
],
|
||||
id: { not: checkOwn ? void 0 : id },
|
||||
destinationDocker: {
|
||||
remoteIpAddress
|
||||
}
|
||||
},
|
||||
select: { fqdn: true }
|
||||
});
|
||||
const coolifyFqdn = await import_prisma.prisma.setting.findFirst({
|
||||
where: {
|
||||
OR: [
|
||||
{ fqdn: { endsWith: `//${nakedDomain}` } },
|
||||
{ fqdn: { endsWith: `//www.${nakedDomain}` } }
|
||||
],
|
||||
id: { not: id }
|
||||
},
|
||||
select: { fqdn: true }
|
||||
});
|
||||
return !!(foundApp || foundService || coolifyFqdn);
|
||||
}
|
||||
async function checkExposedPort({
|
||||
id,
|
||||
configuredPort,
|
||||
exposePort,
|
||||
engine,
|
||||
remoteEngine,
|
||||
remoteIpAddress
|
||||
}) {
|
||||
if (exposePort < 1024 || exposePort > 65535) {
|
||||
throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` };
|
||||
}
|
||||
if (configuredPort) {
|
||||
if (configuredPort !== exposePort) {
|
||||
const availablePort = await getFreeExposedPort(
|
||||
id,
|
||||
exposePort,
|
||||
engine,
|
||||
remoteEngine,
|
||||
remoteIpAddress
|
||||
);
|
||||
if (availablePort.toString() !== exposePort.toString()) {
|
||||
throw { status: 500, message: `Port ${exposePort} is already in use.` };
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const availablePort = await getFreeExposedPort(
|
||||
id,
|
||||
exposePort,
|
||||
engine,
|
||||
remoteEngine,
|
||||
remoteIpAddress
|
||||
);
|
||||
if (availablePort.toString() !== exposePort.toString()) {
|
||||
throw { status: 500, message: `Port ${exposePort} is already in use.` };
|
||||
}
|
||||
}
|
||||
}
|
||||
async function getFreeExposedPort(id, exposePort, engine, remoteEngine, remoteIpAddress) {
|
||||
const { default: checkPort } = await import("is-port-reachable");
|
||||
if (remoteEngine) {
|
||||
const applicationUsed = await (await import_prisma.prisma.application.findMany({
|
||||
where: {
|
||||
exposePort: { not: null },
|
||||
id: { not: id },
|
||||
destinationDocker: { remoteIpAddress }
|
||||
},
|
||||
select: { exposePort: true }
|
||||
})).map((a) => a.exposePort);
|
||||
const serviceUsed = await (await import_prisma.prisma.service.findMany({
|
||||
where: {
|
||||
exposePort: { not: null },
|
||||
id: { not: id },
|
||||
destinationDocker: { remoteIpAddress }
|
||||
},
|
||||
select: { exposePort: true }
|
||||
})).map((a) => a.exposePort);
|
||||
const usedPorts = [...applicationUsed, ...serviceUsed];
|
||||
if (usedPorts.includes(exposePort)) {
|
||||
return false;
|
||||
}
|
||||
const found = await checkPort(exposePort, { host: remoteIpAddress });
|
||||
if (!found) {
|
||||
return exposePort;
|
||||
}
|
||||
return false;
|
||||
} else {
|
||||
const applicationUsed = await (await import_prisma.prisma.application.findMany({
|
||||
where: { exposePort: { not: null }, id: { not: id }, destinationDocker: { engine } },
|
||||
select: { exposePort: true }
|
||||
})).map((a) => a.exposePort);
|
||||
const serviceUsed = await (await import_prisma.prisma.service.findMany({
|
||||
where: { exposePort: { not: null }, id: { not: id }, destinationDocker: { engine } },
|
||||
select: { exposePort: true }
|
||||
})).map((a) => a.exposePort);
|
||||
const usedPorts = [...applicationUsed, ...serviceUsed];
|
||||
if (usedPorts.includes(exposePort)) {
|
||||
return false;
|
||||
}
|
||||
const found = await checkPort(exposePort, { host: "localhost" });
|
||||
if (!found) {
|
||||
return exposePort;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }) {
|
||||
const { isIP } = await import("is-ip");
|
||||
const domain = getDomain(fqdn);
|
||||
const domainDualCert = domain.includes("www.") ? domain.replace("www.", "") : `www.${domain}`;
|
||||
const { DNSServers } = await listSettings();
|
||||
if (DNSServers) {
|
||||
import_dns.promises.setServers([...DNSServers.split(",")]);
|
||||
}
|
||||
let resolves = [];
|
||||
try {
|
||||
if (isIP(hostname)) {
|
||||
resolves = [hostname];
|
||||
} else {
|
||||
resolves = await import_dns.promises.resolve4(hostname);
|
||||
}
|
||||
} catch (error) {
|
||||
throw { status: 500, message: `Could not determine IP address for ${hostname}.` };
|
||||
}
|
||||
if (dualCerts) {
|
||||
try {
|
||||
const ipDomain = await import_dns.promises.resolve4(domain);
|
||||
const ipDomainDualCert = await import_dns.promises.resolve4(domainDualCert);
|
||||
let ipDomainFound = false;
|
||||
let ipDomainDualCertFound = false;
|
||||
for (const ip of ipDomain) {
|
||||
if (resolves.includes(ip)) {
|
||||
ipDomainFound = true;
|
||||
}
|
||||
}
|
||||
for (const ip of ipDomainDualCert) {
|
||||
if (resolves.includes(ip)) {
|
||||
ipDomainDualCertFound = true;
|
||||
}
|
||||
}
|
||||
if (ipDomainFound && ipDomainDualCertFound)
|
||||
return { status: 200 };
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
} catch (error) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
const ipDomain = await import_dns.promises.resolve4(domain);
|
||||
let ipDomainFound = false;
|
||||
for (const ip of ipDomain) {
|
||||
if (resolves.includes(ip)) {
|
||||
ipDomainFound = true;
|
||||
}
|
||||
}
|
||||
if (ipDomainFound)
|
||||
return { status: 200 };
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
} catch (error) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
const setDefaultConfiguration = async (data) => {
|
||||
let {
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
startCommand,
|
||||
buildCommand,
|
||||
publishDirectory,
|
||||
baseDirectory,
|
||||
dockerFileLocation,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile
|
||||
} = data;
|
||||
const template = scanningTemplates[buildPack];
|
||||
if (!port) {
|
||||
port = template?.port || 3e3;
|
||||
if (buildPack === "static")
|
||||
port = 80;
|
||||
else if (buildPack === "node")
|
||||
port = 3e3;
|
||||
else if (buildPack === "php")
|
||||
port = 80;
|
||||
else if (buildPack === "python")
|
||||
port = 8e3;
|
||||
}
|
||||
if (!installCommand && buildPack !== "static" && buildPack !== "laravel")
|
||||
installCommand = template?.installCommand || "yarn install";
|
||||
if (!startCommand && buildPack !== "static" && buildPack !== "laravel")
|
||||
startCommand = template?.startCommand || "yarn start";
|
||||
if (!buildCommand && buildPack !== "static" && buildPack !== "laravel")
|
||||
buildCommand = template?.buildCommand || null;
|
||||
if (!publishDirectory)
|
||||
publishDirectory = template?.publishDirectory || null;
|
||||
if (baseDirectory) {
|
||||
if (!baseDirectory.startsWith("/"))
|
||||
baseDirectory = `/${baseDirectory}`;
|
||||
if (baseDirectory.endsWith("/") && baseDirectory !== "/")
|
||||
baseDirectory = baseDirectory.slice(0, -1);
|
||||
}
|
||||
if (dockerFileLocation) {
|
||||
if (!dockerFileLocation.startsWith("/"))
|
||||
dockerFileLocation = `/${dockerFileLocation}`;
|
||||
if (dockerFileLocation.endsWith("/"))
|
||||
dockerFileLocation = dockerFileLocation.slice(0, -1);
|
||||
} else {
|
||||
dockerFileLocation = "/Dockerfile";
|
||||
}
|
||||
if (dockerComposeFileLocation) {
|
||||
if (!dockerComposeFileLocation.startsWith("/"))
|
||||
dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
|
||||
if (dockerComposeFileLocation.endsWith("/"))
|
||||
dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
|
||||
} else {
|
||||
dockerComposeFileLocation = "/Dockerfile";
|
||||
}
|
||||
if (!denoMainFile) {
|
||||
denoMainFile = "main.ts";
|
||||
}
|
||||
return {
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
startCommand,
|
||||
buildCommand,
|
||||
publishDirectory,
|
||||
baseDirectory,
|
||||
dockerFileLocation,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile
|
||||
};
|
||||
};
|
||||
const scanningTemplates = {
|
||||
"@sveltejs/kit": {
|
||||
buildPack: "nodejs"
|
||||
},
|
||||
astro: {
|
||||
buildPack: "astro"
|
||||
},
|
||||
"@11ty/eleventy": {
|
||||
buildPack: "eleventy"
|
||||
},
|
||||
svelte: {
|
||||
buildPack: "svelte"
|
||||
},
|
||||
"@nestjs/core": {
|
||||
buildPack: "nestjs"
|
||||
},
|
||||
next: {
|
||||
buildPack: "nextjs"
|
||||
},
|
||||
nuxt: {
|
||||
buildPack: "nuxtjs"
|
||||
},
|
||||
"react-scripts": {
|
||||
buildPack: "react"
|
||||
},
|
||||
"parcel-bundler": {
|
||||
buildPack: "static"
|
||||
},
|
||||
"@vue/cli-service": {
|
||||
buildPack: "vuejs"
|
||||
},
|
||||
vuejs: {
|
||||
buildPack: "vuejs"
|
||||
},
|
||||
gatsby: {
|
||||
buildPack: "gatsby"
|
||||
},
|
||||
"preact-cli": {
|
||||
buildPack: "react"
|
||||
}
|
||||
};
|
||||
async function cleanupDB(buildId, applicationId) {
|
||||
const data = await import_prisma.prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (data?.status === "queued" || data?.status === "running") {
|
||||
await import_prisma.prisma.build.update({ where: { id: buildId }, data: { status: "canceled" } });
|
||||
}
|
||||
await (0, import_logging.saveBuildLog)({ line: "Canceled.", buildId, applicationId });
|
||||
}
|
||||
const base64Encode = (text) => {
|
||||
return Buffer.from(text).toString("base64");
|
||||
};
|
||||
const base64Decode = (text) => {
|
||||
return Buffer.from(text, "base64").toString("ascii");
|
||||
};
|
||||
function parseSecret(secret, isBuild) {
|
||||
if (secret.value.includes("$")) {
|
||||
secret.value = secret.value.replaceAll("$", "$$$$");
|
||||
}
|
||||
if (secret.value.includes("\\n")) {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}=${secret.value}`;
|
||||
} else {
|
||||
return `${secret.name}=${secret.value}`;
|
||||
}
|
||||
} else if (secret.value.includes(" ")) {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}='${secret.value}'`;
|
||||
} else {
|
||||
return `${secret.name}='${secret.value}'`;
|
||||
}
|
||||
} else {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}=${secret.value}`;
|
||||
} else {
|
||||
return `${secret.name}=${secret.value}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
function generateSecrets(secrets, pullmergeRequestId, isBuild = false, port = null) {
|
||||
const envs = [];
|
||||
const isPRMRSecret = secrets.filter((s) => s.isPRMRSecret);
|
||||
const normalSecrets = secrets.filter((s) => !s.isPRMRSecret);
|
||||
if (pullmergeRequestId && isPRMRSecret.length > 0) {
|
||||
isPRMRSecret.forEach((secret) => {
|
||||
if (isBuild && !secret.isBuildSecret) {
|
||||
return;
|
||||
}
|
||||
const build = isBuild && secret.isBuildSecret;
|
||||
envs.push(parseSecret(secret, build));
|
||||
});
|
||||
}
|
||||
if (!pullmergeRequestId && normalSecrets.length > 0) {
|
||||
normalSecrets.forEach((secret) => {
|
||||
if (isBuild && !secret.isBuildSecret) {
|
||||
return;
|
||||
}
|
||||
const build = isBuild && secret.isBuildSecret;
|
||||
envs.push(parseSecret(secret, build));
|
||||
});
|
||||
}
|
||||
const portFound = envs.filter((env2) => env2.startsWith("PORT"));
|
||||
if (portFound.length === 0 && port && !isBuild) {
|
||||
envs.push(`PORT=${port}`);
|
||||
}
|
||||
const nodeEnv = envs.filter((env2) => env2.startsWith("NODE_ENV"));
|
||||
if (nodeEnv.length === 0 && !isBuild) {
|
||||
envs.push(`NODE_ENV=production`);
|
||||
}
|
||||
return envs;
|
||||
}
|
||||
function decryptApplication(application) {
|
||||
if (application) {
|
||||
if (application?.gitSource?.githubApp?.clientSecret) {
|
||||
application.gitSource.githubApp.clientSecret = decrypt(application.gitSource.githubApp.clientSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.webhookSecret) {
|
||||
application.gitSource.githubApp.webhookSecret = decrypt(application.gitSource.githubApp.webhookSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.privateKey) {
|
||||
application.gitSource.githubApp.privateKey = decrypt(application.gitSource.githubApp.privateKey) || null;
|
||||
}
|
||||
if (application?.gitSource?.gitlabApp?.appSecret) {
|
||||
application.gitSource.gitlabApp.appSecret = decrypt(application.gitSource.gitlabApp.appSecret) || null;
|
||||
}
|
||||
if (application?.secrets.length > 0) {
|
||||
application.secrets = application.secrets.map((s) => {
|
||||
s.value = decrypt(s.value) || null;
|
||||
return s;
|
||||
});
|
||||
}
|
||||
return application;
|
||||
}
|
||||
}
|
||||
async function pushToRegistry(application, workdir, tag, imageName, customTag) {
|
||||
const location = `${workdir}/.docker`;
|
||||
const tagCommand = `docker tag ${application.id}:${tag} ${imageName}:${customTag}`;
|
||||
const pushCommand = `docker --config ${location} push ${imageName}:${customTag}`;
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: application.destinationDockerId,
|
||||
command: tagCommand
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: application.destinationDockerId,
|
||||
command: pushCommand
|
||||
});
|
||||
}
|
||||
async function getContainerUsage(dockerId, container) {
|
||||
try {
|
||||
const { stdout } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"`
|
||||
});
|
||||
return JSON.parse(stdout);
|
||||
} catch (err) {
|
||||
return {
|
||||
MemUsage: 0,
|
||||
CPUPerc: 0,
|
||||
NetIO: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
function fixType(type) {
|
||||
return type?.replaceAll(" ", "").toLowerCase() || null;
|
||||
}
|
||||
const compareSemanticVersions = (a, b) => {
|
||||
const a1 = a.split(".");
|
||||
const b1 = b.split(".");
|
||||
const len = Math.min(a1.length, b1.length);
|
||||
for (let i = 0; i < len; i++) {
|
||||
const a2 = +a1[i] || 0;
|
||||
const b2 = +b1[i] || 0;
|
||||
if (a2 !== b2) {
|
||||
return a2 > b2 ? 1 : -1;
|
||||
}
|
||||
}
|
||||
return b1.length - a1.length;
|
||||
};
|
||||
async function getTags(type) {
|
||||
try {
|
||||
if (type) {
|
||||
const tagsPath = isDev ? "./tags.json" : "/app/tags.json";
|
||||
const data = await import_promises.default.readFile(tagsPath, "utf8");
|
||||
let tags = JSON.parse(data);
|
||||
if (tags) {
|
||||
tags = tags.find((tag) => tag.name.includes(type));
|
||||
tags.tags = tags.tags.sort(compareSemanticVersions).reverse();
|
||||
return tags;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
function makeLabelForServices(type) {
|
||||
return [
|
||||
"coolify.managed=true",
|
||||
`coolify.version=${version}`,
|
||||
`coolify.type=service`,
|
||||
`coolify.service.type=${type}`
|
||||
];
|
||||
}
|
||||
const asyncSleep = (delay) => new Promise((resolve) => setTimeout(resolve, delay));
|
||||
async function startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort, type) {
|
||||
const { network, id: dockerId, remoteEngine } = destinationDocker;
|
||||
const container = `${id}-${publicPort}`;
|
||||
const { found } = await (0, import_docker.checkContainer)({ dockerId, container, remove: true });
|
||||
const { ipv4, ipv6 } = await listSettings();
|
||||
let dependentId = id;
|
||||
if (type === "wordpressftp")
|
||||
dependentId = `${id}-ftp`;
|
||||
const { found: foundDependentContainer } = await (0, import_docker.checkContainer)({
|
||||
dockerId,
|
||||
container: dependentId,
|
||||
remove: true
|
||||
});
|
||||
if (foundDependentContainer && !found) {
|
||||
const { stdout: Config } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
||||
});
|
||||
const ip = JSON.parse(Config)[0].Gateway;
|
||||
let traefikUrl = otherTraefikEndpoint;
|
||||
if (remoteEngine) {
|
||||
let ip2 = null;
|
||||
if (isDev) {
|
||||
ip2 = getAPIUrl();
|
||||
} else {
|
||||
ip2 = `http://${ipv4 || ipv6}:3000`;
|
||||
}
|
||||
traefikUrl = `${ip2}/webhooks/traefik/other.json`;
|
||||
}
|
||||
const tcpProxy = {
|
||||
version: "3.8",
|
||||
services: {
|
||||
[`${id}-${publicPort}`]: {
|
||||
container_name: container,
|
||||
image: defaultTraefikImage,
|
||||
command: [
|
||||
`--entrypoints.tcp.address=:${publicPort}`,
|
||||
`--entryPoints.tcp.forwardedHeaders.insecure=true`,
|
||||
`--providers.http.endpoint=${traefikUrl}?id=${id}&privatePort=${privatePort}&publicPort=${publicPort}&type=tcp&address=${dependentId}`,
|
||||
"--providers.http.pollTimeout=10s",
|
||||
"--log.level=error"
|
||||
],
|
||||
ports: [`${publicPort}:${publicPort}`],
|
||||
extra_hosts: ["host.docker.internal:host-gateway", `host.docker.internal: ${ip}`],
|
||||
volumes: ["/var/run/docker.sock:/var/run/docker.sock"],
|
||||
networks: ["coolify-infra", network]
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[network]: {
|
||||
external: false,
|
||||
name: network
|
||||
},
|
||||
"coolify-infra": {
|
||||
external: false,
|
||||
name: "coolify-infra"
|
||||
}
|
||||
}
|
||||
};
|
||||
await import_promises.default.writeFile(`/tmp/docker-compose-${id}.yaml`, import_js_yaml.default.dump(tcpProxy));
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d`
|
||||
});
|
||||
await import_promises.default.rm(`/tmp/docker-compose-${id}.yaml`);
|
||||
}
|
||||
if (!foundDependentContainer && found) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker stop -t 0 ${container} && docker rm ${container}`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
}
|
||||
async function startTraefikProxy(id) {
|
||||
const { engine, network, remoteEngine, remoteIpAddress } = await import_prisma.prisma.destinationDocker.findUnique({ where: { id } });
|
||||
const { found } = await (0, import_docker.checkContainer)({
|
||||
dockerId: id,
|
||||
container: "coolify-proxy",
|
||||
remove: true
|
||||
});
|
||||
const { id: settingsId, ipv4, ipv6 } = await listSettings();
|
||||
if (!found) {
|
||||
const { stdout: coolifyNetwork } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`
|
||||
});
|
||||
if (!coolifyNetwork) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker network create --attachable coolify-infra`
|
||||
});
|
||||
}
|
||||
const { stdout: Config } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
||||
});
|
||||
const ip = JSON.parse(Config)[0].Gateway;
|
||||
let traefikUrl = mainTraefikEndpoint;
|
||||
if (remoteEngine) {
|
||||
let ip2 = null;
|
||||
if (isDev) {
|
||||
ip2 = getAPIUrl();
|
||||
} else {
|
||||
ip2 = `http://${ipv4 || ipv6}:3000`;
|
||||
}
|
||||
traefikUrl = `${ip2}/webhooks/traefik/remote/${id}`;
|
||||
}
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker run --restart always --add-host 'host.docker.internal:host-gateway' ${ip ? `--add-host 'host.docker.internal:${ip}'` : ""} -v coolify-traefik-letsencrypt:/etc/traefik/acme -v /var/run/docker.sock:/var/run/docker.sock --network coolify-infra -p "80:80" -p "443:443" --name coolify-proxy -d ${defaultTraefikImage} --entrypoints.web.address=:80 --entrypoints.web.forwardedHeaders.insecure=true --entrypoints.websecure.address=:443 --entrypoints.websecure.forwardedHeaders.insecure=true --providers.docker=true --providers.docker.exposedbydefault=false --providers.http.endpoint=${traefikUrl} --providers.http.pollTimeout=5s --certificatesresolvers.letsencrypt.acme.httpchallenge=true --certificatesresolvers.letsencrypt.acme.storage=/etc/traefik/acme/acme.json --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web --log.level=error`
|
||||
});
|
||||
await import_prisma.prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: true }
|
||||
});
|
||||
}
|
||||
if (engine) {
|
||||
const destinations = await import_prisma.prisma.destinationDocker.findMany({ where: { engine } });
|
||||
for (const destination of destinations) {
|
||||
await configureNetworkTraefikProxy(destination);
|
||||
}
|
||||
}
|
||||
if (remoteEngine) {
|
||||
const destinations = await import_prisma.prisma.destinationDocker.findMany({ where: { remoteIpAddress } });
|
||||
for (const destination of destinations) {
|
||||
await configureNetworkTraefikProxy(destination);
|
||||
}
|
||||
}
|
||||
}
|
||||
async function configureNetworkTraefikProxy(destination) {
|
||||
const { id } = destination;
|
||||
const { stdout: networks } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'`
|
||||
});
|
||||
const configuredNetworks = networks.replace(/"/g, "").replace("\n", "").split(",");
|
||||
if (!configuredNetworks.includes(destination.network)) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destination.id,
|
||||
command: `docker network connect ${destination.network} coolify-proxy`
|
||||
});
|
||||
}
|
||||
}
|
||||
async function stopTraefikProxy(id) {
|
||||
const { found } = await (0, import_docker.checkContainer)({ dockerId: id, container: "coolify-proxy" });
|
||||
await import_prisma.prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: false }
|
||||
});
|
||||
if (found) {
|
||||
return await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
return { stdout: "", stderr: "" };
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
asyncSleep,
|
||||
base64Decode,
|
||||
base64Encode,
|
||||
checkDomainsIsValidInDNS,
|
||||
checkExposedPort,
|
||||
cleanupDB,
|
||||
comparePassword,
|
||||
configureNetworkTraefikProxy,
|
||||
createDirectories,
|
||||
decrypt,
|
||||
decryptApplication,
|
||||
defaultTraefikImage,
|
||||
encrypt,
|
||||
fixType,
|
||||
generateRangeArray,
|
||||
generateSecrets,
|
||||
generateTimestamp,
|
||||
getAPIUrl,
|
||||
getContainerUsage,
|
||||
getCurrentUser,
|
||||
getDomain,
|
||||
getFreeExposedPort,
|
||||
getTags,
|
||||
getTeamInvitation,
|
||||
getTemplates,
|
||||
getUIUrl,
|
||||
hashPassword,
|
||||
isARM,
|
||||
isDev,
|
||||
isDomainConfigured,
|
||||
listSettings,
|
||||
makeLabelForServices,
|
||||
pushToRegistry,
|
||||
removeService,
|
||||
saveDockerRegistryCredentials,
|
||||
scanningTemplates,
|
||||
sentryDSN,
|
||||
setDefaultConfiguration,
|
||||
startTraefikProxy,
|
||||
startTraefikTCPProxy,
|
||||
stopTraefikProxy,
|
||||
uniqueName,
|
||||
version
|
||||
});
|
||||
38
apps/trpc-experimental/server/build/lib/dayjs.js
Normal file
38
apps/trpc-experimental/server/build/lib/dayjs.js
Normal file
@@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var dayjs_exports = {};
|
||||
__export(dayjs_exports, {
|
||||
day: () => import_dayjs.default
|
||||
});
|
||||
module.exports = __toCommonJS(dayjs_exports);
|
||||
var import_dayjs = __toESM(require("dayjs"));
|
||||
var import_utc = __toESM(require("dayjs/plugin/utc.js"));
|
||||
var import_relativeTime = __toESM(require("dayjs/plugin/relativeTime.js"));
|
||||
import_dayjs.default.extend(import_utc.default);
|
||||
import_dayjs.default.extend(import_relativeTime.default);
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
day
|
||||
});
|
||||
168
apps/trpc-experimental/server/build/lib/docker.js
Normal file
168
apps/trpc-experimental/server/build/lib/docker.js
Normal file
@@ -0,0 +1,168 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var docker_exports = {};
|
||||
__export(docker_exports, {
|
||||
checkContainer: () => checkContainer,
|
||||
defaultComposeConfiguration: () => defaultComposeConfiguration,
|
||||
formatLabelsOnDocker: () => formatLabelsOnDocker,
|
||||
removeContainer: () => removeContainer,
|
||||
stopDatabaseContainer: () => stopDatabaseContainer,
|
||||
stopTcpHttpProxy: () => stopTcpHttpProxy
|
||||
});
|
||||
module.exports = __toCommonJS(docker_exports);
|
||||
var import_executeCommand = require("./executeCommand");
|
||||
async function checkContainer({
|
||||
dockerId,
|
||||
container,
|
||||
remove = false
|
||||
}) {
|
||||
let containerFound = false;
|
||||
try {
|
||||
const { stdout } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker inspect --format '{{json .State}}' ${container}`
|
||||
});
|
||||
containerFound = true;
|
||||
const parsedStdout = JSON.parse(stdout);
|
||||
const status = parsedStdout.Status;
|
||||
const isRunning = status === "running";
|
||||
const isRestarting = status === "restarting";
|
||||
const isExited = status === "exited";
|
||||
if (status === "created") {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker rm ${container}`
|
||||
});
|
||||
}
|
||||
if (remove && status === "exited") {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker rm ${container}`
|
||||
});
|
||||
}
|
||||
return {
|
||||
found: containerFound,
|
||||
status: {
|
||||
isRunning,
|
||||
isRestarting,
|
||||
isExited
|
||||
}
|
||||
};
|
||||
} catch (err) {
|
||||
}
|
||||
return {
|
||||
found: false
|
||||
};
|
||||
}
|
||||
async function removeContainer({
|
||||
id,
|
||||
dockerId
|
||||
}) {
|
||||
try {
|
||||
const { stdout } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker inspect --format '{{json .State}}' ${id}`
|
||||
});
|
||||
if (JSON.parse(stdout).Running) {
|
||||
await (0, import_executeCommand.executeCommand)({ dockerId, command: `docker stop -t 0 ${id}` });
|
||||
await (0, import_executeCommand.executeCommand)({ dockerId, command: `docker rm ${id}` });
|
||||
}
|
||||
if (JSON.parse(stdout).Status === "exited") {
|
||||
await (0, import_executeCommand.executeCommand)({ dockerId, command: `docker rm ${id}` });
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
async function stopDatabaseContainer(database) {
|
||||
let everStarted = false;
|
||||
const {
|
||||
id,
|
||||
destinationDockerId,
|
||||
destinationDocker: { engine, id: dockerId }
|
||||
} = database;
|
||||
if (destinationDockerId) {
|
||||
try {
|
||||
const { stdout } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker inspect --format '{{json .State}}' ${id}`
|
||||
});
|
||||
if (stdout) {
|
||||
everStarted = true;
|
||||
await removeContainer({ id, dockerId });
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
}
|
||||
return everStarted;
|
||||
}
|
||||
async function stopTcpHttpProxy(id, destinationDocker, publicPort, forceName = null) {
|
||||
const { id: dockerId } = destinationDocker;
|
||||
let container = `${id}-${publicPort}`;
|
||||
if (forceName)
|
||||
container = forceName;
|
||||
const { found } = await checkContainer({ dockerId, container });
|
||||
try {
|
||||
if (!found)
|
||||
return true;
|
||||
return await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker stop -t 0 ${container} && docker rm ${container}`,
|
||||
shell: true
|
||||
});
|
||||
} catch (error) {
|
||||
return error;
|
||||
}
|
||||
}
|
||||
function formatLabelsOnDocker(data) {
|
||||
return data.trim().split("\n").map((a) => JSON.parse(a)).map((container) => {
|
||||
const labels = container.Labels.split(",");
|
||||
let jsonLabels = {};
|
||||
labels.forEach((l) => {
|
||||
const name = l.split("=")[0];
|
||||
const value = l.split("=")[1];
|
||||
jsonLabels = { ...jsonLabels, ...{ [name]: value } };
|
||||
});
|
||||
container.Labels = jsonLabels;
|
||||
return container;
|
||||
});
|
||||
}
|
||||
function defaultComposeConfiguration(network) {
|
||||
return {
|
||||
networks: [network],
|
||||
restart: "on-failure",
|
||||
deploy: {
|
||||
restart_policy: {
|
||||
condition: "on-failure",
|
||||
delay: "5s",
|
||||
max_attempts: 10,
|
||||
window: "120s"
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
checkContainer,
|
||||
defaultComposeConfiguration,
|
||||
formatLabelsOnDocker,
|
||||
removeContainer,
|
||||
stopDatabaseContainer,
|
||||
stopTcpHttpProxy
|
||||
});
|
||||
207
apps/trpc-experimental/server/build/lib/executeCommand.js
Normal file
207
apps/trpc-experimental/server/build/lib/executeCommand.js
Normal file
@@ -0,0 +1,207 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var executeCommand_exports = {};
|
||||
__export(executeCommand_exports, {
|
||||
createRemoteEngineConfiguration: () => createRemoteEngineConfiguration,
|
||||
executeCommand: () => executeCommand
|
||||
});
|
||||
module.exports = __toCommonJS(executeCommand_exports);
|
||||
var import_prisma = require("../prisma");
|
||||
var import_os = __toESM(require("os"));
|
||||
var import_promises = __toESM(require("fs/promises"));
|
||||
var import_ssh_config = __toESM(require("ssh-config"));
|
||||
var import_ssh = require("./ssh");
|
||||
var import_env = require("../env");
|
||||
var import_logging = require("./logging");
|
||||
var import_common = require("./common");
|
||||
/**
 * Runs a command, optionally against a (possibly remote) Docker engine.
 *
 * Modes, selected by the options:
 *  - `dockerId` set: resolves the destination engine (local socket or SSH to a
 *    remote host) and exports DOCKER_HOST/DOCKER_BUILDKIT for the child.
 *  - `sshCommand`: runs the command on the remote host via the `ssh` CLI.
 *  - `stream`: spawns the child and forwards stdout/stderr line-by-line to the
 *    build log, resolving/rejecting with the exit code.
 *  - `shell`: uses execaCommand (shell-style single string) instead of
 *    argv-style execa.
 *
 * @param {object} opts
 * @param {string} opts.command - the command line to execute.
 * @param {string|null} opts.dockerId - destinationDocker id, or null for plain local execution.
 * @param {boolean} opts.sshCommand - run directly over ssh instead of via DOCKER_HOST.
 * @param {boolean} opts.shell - run through a shell (single command string).
 * @param {boolean} opts.stream - stream output into build logs instead of buffering.
 * @param {string} [opts.buildId] - build log correlation id (stream mode).
 * @param {string} [opts.applicationId] - application log correlation id (stream mode).
 * @param {boolean} [opts.debug] - when true, persist every log line immediately.
 * @throws {Error} when `dockerId` is given but no destination docker exists.
 */
async function executeCommand({
  command,
  dockerId = null,
  sshCommand = false,
  shell = false,
  stream = false,
  buildId,
  applicationId,
  debug
}) {
  // execa and shell-quote are ESM-only, hence the dynamic imports.
  const { execa, execaCommand } = await import("execa");
  const { parse } = await import("shell-quote");
  const parsedCommand = parse(command);
  const dockerCommand = parsedCommand[0];
  const dockerArgs = parsedCommand.slice(1);
  // NOTE: `dockerArgs` is an array and therefore always truthy; the branch is
  // effectively gated on `dockerId && dockerCommand`.
  if (dockerId && dockerCommand && dockerArgs) {
    const destinationDocker = await import_prisma.prisma.destinationDocker.findUnique({
      where: { id: dockerId }
    });
    if (!destinationDocker) {
      throw new Error("Destination docker not found");
    }
    let { remoteEngine, remoteIpAddress, engine } = destinationDocker;
    if (remoteEngine) {
      // Write the SSH config/key for this destination, then target it via
      // the `ssh://` docker host syntax (host alias is `<ip>-remote`).
      await createRemoteEngineConfiguration(dockerId);
      engine = `ssh://${remoteIpAddress}-remote`;
    } else {
      engine = "unix:///var/run/docker.sock";
    }
    if (import_env.env.CODESANDBOX_HOST) {
      // CodeSandbox images ship the legacy standalone binary only.
      if (command.startsWith("docker compose")) {
        command = command.replace(/docker compose/gi, "docker-compose");
      }
    }
    if (sshCommand) {
      if (shell) {
        return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`);
      }
      return await execa("ssh", [`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs]);
    }
    if (stream) {
      // NOTE(review): async Promise executor — an exception thrown inside it
      // would not reject this promise, and no subprocess "error" event handler
      // is attached, so a failed spawn could leave the promise pending. Left
      // as-is to preserve behavior; confirm before changing.
      return await new Promise(async (resolve, reject) => {
        let subprocess = null;
        if (shell) {
          subprocess = execaCommand(command, {
            env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }
          });
        } else {
          subprocess = execa(dockerCommand, dockerArgs, {
            env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }
          });
        }
        // Lines are buffered here and only flushed to storage on failure
        // (unless `debug`, in which case every line is saved immediately).
        const logs = [];
        if (subprocess && subprocess.stdout && subprocess.stderr) {
          subprocess.stdout.on("data", async (data) => {
            const stdout = data.toString();
            const array = stdout.split("\n");
            for (const line of array) {
              if (line !== "\n" && line !== "") {
                const log = {
                  line: `${line.replace("\n", "")}`,
                  buildId,
                  applicationId
                };
                logs.push(log);
                if (debug) {
                  await (0, import_logging.saveBuildLog)(log);
                }
              }
            }
          });
          subprocess.stderr.on("data", async (data) => {
            const stderr = data.toString();
            const array = stderr.split("\n");
            for (const line of array) {
              if (line !== "\n" && line !== "") {
                const log = {
                  line: `${line.replace("\n", "")}`,
                  buildId,
                  applicationId
                };
                logs.push(log);
                if (debug) {
                  await (0, import_logging.saveBuildLog)(log);
                }
              }
            }
          });
          subprocess.on("exit", async (code) => {
            if (code === 0) {
              resolve(code);
            } else {
              // On failure, persist the buffered lines (they were not saved
              // eagerly when debug was off), then reject with the exit code.
              if (!debug) {
                for (const log of logs) {
                  await (0, import_logging.saveBuildLog)(log);
                }
              }
              reject(code);
            }
          });
        }
      });
    } else {
      if (shell) {
        return await execaCommand(command, {
          env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }
        });
      } else {
        return await execa(dockerCommand, dockerArgs, {
          env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }
        });
      }
    }
  } else {
    // No docker destination: plain local execution.
    if (shell) {
      return execaCommand(command, { shell: true });
    }
    return await execa(dockerCommand, dockerArgs);
  }
}
|
||||
/**
 * Prepares local SSH configuration for a remote Docker engine: writes the
 * decrypted private key to /tmp, clears stale known_hosts entries, and
 * (re)writes a `<ip>-remote` Host block into ~/.ssh/config with connection
 * multiplexing enabled (ControlMaster/ControlPersist).
 *
 * Fix: the key file was written with `mode: 400` — a *decimal* literal, i.e.
 * 0o620 (group-writable), not the intended owner-read-only permission. SSH
 * refuses group/world-accessible private keys, so the mode is now the octal
 * literal 0o400.
 *
 * @param {string} id - destinationDocker id.
 * @returns {Promise<void>} resolves once ~/.ssh/config has been written.
 */
async function createRemoteEngineConfiguration(id) {
  const homedir = import_os.default.homedir();
  const sshKeyFile = `/tmp/id_rsa-${id}`;
  const localPort = await (0, import_ssh.getFreeSSHLocalPort)(id);
  const {
    sshKey: { privateKey },
    network,
    remoteIpAddress,
    remotePort,
    remoteUser
  } = await import_prisma.prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } });
  // Owner-read-only (0o400): ssh rejects keys with looser permissions.
  await import_promises.default.writeFile(sshKeyFile, (0, import_common.decrypt)(privateKey) + "\n", { encoding: "utf8", mode: 0o400 });
  const config = import_ssh_config.default.parse("");
  const Host = `${remoteIpAddress}-remote`;
  try {
    // Drop any stale known_hosts entries for this host; failures here are
    // non-fatal (e.g. the entry simply does not exist yet).
    await executeCommand({ command: `ssh-keygen -R ${Host}` });
    await executeCommand({ command: `ssh-keygen -R ${remoteIpAddress}` });
    await executeCommand({ command: `ssh-keygen -R localhost:${localPort}` });
  } catch (error) {
  }
  // Remove any previous Host blocks before appending the fresh one.
  const found = config.find({ Host });
  const foundIp = config.find({ Host: remoteIpAddress });
  if (found)
    config.remove({ Host });
  if (foundIp)
    config.remove({ Host: remoteIpAddress });
  config.append({
    Host,
    Hostname: remoteIpAddress,
    Port: remotePort.toString(),
    User: remoteUser,
    StrictHostKeyChecking: "no",
    IdentityFile: sshKeyFile,
    // Reuse one multiplexed connection per remote for 10 minutes.
    ControlMaster: "auto",
    ControlPath: `${homedir}/.ssh/coolify-${remoteIpAddress}-%r@%h:%p`,
    ControlPersist: "10m"
  });
  try {
    await import_promises.default.stat(`${homedir}/.ssh/`);
  } catch (error) {
    await import_promises.default.mkdir(`${homedir}/.ssh/`);
  }
  return await import_promises.default.writeFile(`${homedir}/.ssh/config`, import_ssh_config.default.stringify(config));
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
createRemoteEngineConfiguration,
|
||||
executeCommand
|
||||
});
|
||||
108
apps/trpc-experimental/server/build/lib/importers/github.js
Normal file
108
apps/trpc-experimental/server/build/lib/importers/github.js
Normal file
@@ -0,0 +1,108 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var github_exports = {};
|
||||
__export(github_exports, {
|
||||
default: () => github_default
|
||||
});
|
||||
module.exports = __toCommonJS(github_exports);
|
||||
var import_jsonwebtoken = __toESM(require("jsonwebtoken"));
|
||||
var import_prisma = require("../../prisma");
|
||||
var import_common = require("../buildPacks/common");
|
||||
var import_common2 = require("../common");
|
||||
var import_executeCommand = require("../executeCommand");
|
||||
/**
 * Clones a GitHub repository into `workdir` for a build and returns the HEAD
 * commit hash.
 *
 * Public repos are cloned anonymously over HTTPS. Private repos authenticate
 * as a GitHub App: the App's private key signs a short-lived JWT, which is
 * exchanged for an installation access token used in the clone URL.
 *
 * @param {object} opts - applicationId, workdir, githubAppId, repository,
 *   apiUrl, gitCommitHash, htmlUrl, branch, buildId, customPort, forPublic.
 * @returns {Promise<string>} the checked-out commit hash (trailing newline stripped).
 */
async function github_default({
  applicationId,
  workdir,
  githubAppId,
  repository,
  apiUrl,
  gitCommitHash,
  htmlUrl,
  branch,
  buildId,
  customPort,
  forPublic
}) {
  const { default: got } = await import("got");
  // Strip the scheme: the host is re-embedded into the clone URL below.
  const url = htmlUrl.replace("https://", "").replace("http://", "");
  if (forPublic) {
    await (0, import_common.saveBuildLog)({
      line: `Cloning ${repository}:${branch}...`,
      buildId,
      applicationId
    });
    if (gitCommitHash) {
      await (0, import_common.saveBuildLog)({
        line: `Checking out ${gitCommitHash} commit...`,
        buildId,
        applicationId
      });
    }
    // Anonymous HTTPS clone; `git checkout ""` is a no-op when no hash is given.
    await (0, import_executeCommand.executeCommand)({
      command: `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
      shell: true
    });
  } else {
    const body = await import_prisma.prisma.githubApp.findUnique({ where: { id: githubAppId } });
    if (body.privateKey)
      body.privateKey = (0, import_common2.decrypt)(body.privateKey);
    const { privateKey, appId, installationId } = body;
    // Keys are stored with escaped newlines / stray quotes; normalize for PEM parsing.
    const githubPrivateKey = privateKey.replace(/\\n/g, "\n").replace(/"/g, "");
    // App JWT valid for 60 seconds — just long enough for the token exchange.
    const payload = {
      iat: Math.round(new Date().getTime() / 1e3),
      exp: Math.round(new Date().getTime() / 1e3 + 60),
      iss: appId
    };
    const jwtToken = import_jsonwebtoken.default.sign(payload, githubPrivateKey, {
      algorithm: "RS256"
    });
    // Exchange the App JWT for an installation access token.
    const { token } = await got.post(`${apiUrl}/app/installations/${installationId}/access_tokens`, {
      headers: {
        Authorization: `Bearer ${jwtToken}`,
        Accept: "application/vnd.github.machine-man-preview+json"
      }
    }).json();
    await (0, import_common.saveBuildLog)({
      line: `Cloning ${repository}:${branch}...`,
      buildId,
      applicationId
    });
    if (gitCommitHash) {
      await (0, import_common.saveBuildLog)({
        line: `Checking out ${gitCommitHash} commit...`,
        buildId,
        applicationId
      });
    }
    // Authenticated clone using the installation token in the URL.
    await (0, import_executeCommand.executeCommand)({
      command: `git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
      shell: true
    });
  }
  const { stdout: commit } = await (0, import_executeCommand.executeCommand)({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
  return commit.replace("\n", "");
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
76
apps/trpc-experimental/server/build/lib/importers/gitlab.js
Normal file
76
apps/trpc-experimental/server/build/lib/importers/gitlab.js
Normal file
@@ -0,0 +1,76 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var gitlab_exports = {};
|
||||
__export(gitlab_exports, {
|
||||
default: () => gitlab_default
|
||||
});
|
||||
module.exports = __toCommonJS(gitlab_exports);
|
||||
var import_common = require("../buildPacks/common");
|
||||
var import_executeCommand = require("../executeCommand");
|
||||
/**
 * Clones a GitLab repository into `workdir` for a build and returns the HEAD
 * commit hash.
 *
 * Public repos are cloned anonymously over HTTPS; private repos are cloned
 * over SSH using a deploy key written to `${repodir}/id.rsa`.
 *
 * Fix: the private-clone `core.sshCommand` referenced the key as
 * `${repodir}id.rsa` (no path separator) while the key is written to
 * `${repodir}/id.rsa` — the identity file path now matches the write path.
 *
 * @param {object} opts - applicationId, workdir, repodir, htmlUrl,
 *   gitCommitHash, repository, branch, buildId, privateSshKey, customPort,
 *   forPublic, customUser.
 * @returns {Promise<string>} the checked-out commit hash (trailing newline stripped).
 */
async function gitlab_default({
  applicationId,
  workdir,
  repodir,
  htmlUrl,
  gitCommitHash,
  repository,
  branch,
  buildId,
  privateSshKey,
  customPort,
  forPublic,
  customUser
}) {
  // Strip scheme and trailing slash: the bare host is re-embedded below.
  const url = htmlUrl.replace("https://", "").replace("http://", "").replace(/\/$/, "");
  if (!forPublic) {
    // Write the deploy key with owner-only permissions for ssh.
    await (0, import_executeCommand.executeCommand)({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
    await (0, import_executeCommand.executeCommand)({ command: `chmod 600 ${repodir}/id.rsa` });
  }
  await (0, import_common.saveBuildLog)({
    line: `Cloning ${repository}:${branch}...`,
    buildId,
    applicationId
  });
  if (gitCommitHash) {
    await (0, import_common.saveBuildLog)({
      line: `Checking out ${gitCommitHash} commit...`,
      buildId,
      applicationId
    });
  }
  if (forPublic) {
    // Anonymous HTTPS clone; `git checkout ""` is a no-op when no hash is given.
    await (0, import_executeCommand.executeCommand)(
      {
        command: `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
        shell: true
      }
    );
  } else {
    // SSH clone authenticated with the deploy key written above.
    await (0, import_executeCommand.executeCommand)(
      {
        command: `git clone -q -b ${branch} ${customUser}@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}/id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
        shell: true
      }
    );
  }
  const { stdout: commit } = await (0, import_executeCommand.executeCommand)({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
  return commit.replace("\n", "");
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {});
|
||||
37
apps/trpc-experimental/server/build/lib/importers/index.js
Normal file
37
apps/trpc-experimental/server/build/lib/importers/index.js
Normal file
@@ -0,0 +1,37 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var importers_exports = {};
|
||||
__export(importers_exports, {
|
||||
github: () => import_github.default,
|
||||
gitlab: () => import_gitlab.default
|
||||
});
|
||||
module.exports = __toCommonJS(importers_exports);
|
||||
var import_github = __toESM(require("./github"));
|
||||
var import_gitlab = __toESM(require("./gitlab"));
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
github,
|
||||
gitlab
|
||||
});
|
||||
75
apps/trpc-experimental/server/build/lib/logging.js
Normal file
75
apps/trpc-experimental/server/build/lib/logging.js
Normal file
@@ -0,0 +1,75 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var logging_exports = {};
|
||||
__export(logging_exports, {
|
||||
saveBuildLog: () => saveBuildLog
|
||||
});
|
||||
module.exports = __toCommonJS(logging_exports);
|
||||
var import_prisma = require("../prisma");
|
||||
var import_common = require("./common");
|
||||
var import_dayjs = require("./dayjs");
|
||||
// Persists one build-log line. Lines are shipped to Fluent Bit (encrypted);
// if that fails, the line is written straight to the database instead.
// Object "lines" (e.g. execa errors) are flattened to text, and GitHub
// installation tokens (`ghs_...@`) are redacted before storage.
const saveBuildLog = async ({ line, buildId, applicationId }) => {
  // Guard against missing ids, including the stringified "undefined"/"null"
  // values that can arrive from serialized payloads.
  const isMissing = (value) => value === "undefined" || value === "null" || !value;
  if (isMissing(buildId)) {
    return;
  }
  if (isMissing(applicationId)) {
    return;
  }
  const { default: got } = await import("got");
  if (typeof line === "object" && line) {
    line = line.shortMessage ? line.shortMessage + "\n" + line.stderr : JSON.stringify(line);
  }
  if (line && typeof line === "string" && line.includes("ghs_")) {
    // Redact GitHub installation access tokens embedded in clone URLs.
    line = line.replace(/ghs_.*@/g, "<SENSITIVE_DATA_DELETED>@");
  }
  const addTimestamp = `[${(0, import_common.generateTimestamp)()}] ${line}`;
  const fluentBitUrl = import_common.isDev ? "http://localhost:24224" : "http://coolify-fluentbit:24224";
  if (import_common.isDev) {
    console.debug(`[${applicationId}] ${addTimestamp}`);
  }
  try {
    return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
      json: {
        line: (0, import_common.encrypt)(line)
      }
    });
  } catch (error) {
    // Fluent Bit unreachable — fall back to a direct database write.
    return await import_prisma.prisma.buildLog.create({
      data: {
        line: addTimestamp,
        buildId,
        time: Number((0, import_dayjs.day)().valueOf()),
        applicationId
      }
    });
  }
};
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
saveBuildLog
|
||||
});
|
||||
76
apps/trpc-experimental/server/build/lib/ssh.js
Normal file
76
apps/trpc-experimental/server/build/lib/ssh.js
Normal file
@@ -0,0 +1,76 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var ssh_exports = {};
|
||||
__export(ssh_exports, {
|
||||
getFreeSSHLocalPort: () => getFreeSSHLocalPort
|
||||
});
|
||||
module.exports = __toCommonJS(ssh_exports);
|
||||
var import_prisma = require("../prisma");
|
||||
var import_common = require("./common");
|
||||
/**
 * Allocates (and persists) a free local port used for the SSH tunnel to a
 * remote Docker destination.
 *
 * Resolution order:
 *  1. the destination's already-assigned `sshLocalPort`;
 *  2. a port already assigned to another destination on the same remote IP
 *     (tunnels to the same host are shared);
 *  3. the first port in the configured [minPort, maxPort] range that is
 *     neither assigned to another destination nor currently reachable on
 *     localhost.
 *
 * @param {string} id - destinationDocker id.
 * @returns {Promise<number|false>} the port number, or `false` when the
 *   whole range is exhausted.
 */
async function getFreeSSHLocalPort(id) {
  const { default: isReachable } = await import("is-port-reachable");
  const { remoteIpAddress, sshLocalPort } = await import_prisma.prisma.destinationDocker.findUnique({
    where: { id }
  });
  if (sshLocalPort) {
    return Number(sshLocalPort);
  }
  // Port range comes from global settings.
  const data = await import_prisma.prisma.setting.findFirst();
  const { minPort, maxPort } = data;
  // Ports taken by destinations on *other* remote IPs — those must be avoided.
  const ports = await import_prisma.prisma.destinationDocker.findMany({
    where: { sshLocalPort: { not: null }, remoteIpAddress: { not: remoteIpAddress } }
  });
  // Another destination on the same remote IP may already have a port; reuse it.
  const alreadyConfigured = await import_prisma.prisma.destinationDocker.findFirst({
    where: {
      remoteIpAddress,
      id: { not: id },
      sshLocalPort: { not: null }
    }
  });
  if (alreadyConfigured?.sshLocalPort) {
    await import_prisma.prisma.destinationDocker.update({
      where: { id },
      data: { sshLocalPort: alreadyConfigured.sshLocalPort }
    });
    return Number(alreadyConfigured.sshLocalPort);
  }
  const range = (0, import_common.generateRangeArray)(minPort, maxPort);
  const availablePorts = range.filter((port) => !ports.map((p) => p.sshLocalPort).includes(port));
  for (const port of availablePorts) {
    // Skip ports something is already listening on locally.
    const found = await isReachable(port, { host: "localhost" });
    if (!found) {
      await import_prisma.prisma.destinationDocker.update({
        where: { id },
        data: { sshLocalPort: Number(port) }
      });
      return Number(port);
    }
  }
  // Entire range exhausted.
  return false;
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
getFreeSSHLocalPort
|
||||
});
|
||||
36
apps/trpc-experimental/server/build/prisma.js
Normal file
36
apps/trpc-experimental/server/build/prisma.js
Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var prisma_exports = {};
|
||||
__export(prisma_exports, {
|
||||
prisma: () => prisma
|
||||
});
|
||||
module.exports = __toCommonJS(prisma_exports);
|
||||
var import_env = require("./env");
|
||||
var import_client = require("@prisma/client");
|
||||
// Singleton PrismaClient cached on the global object so hot reloads in
// development do not exhaust database connections by creating new clients.
const prismaGlobal = global;
// NOTE(review): the log condition looks inverted — verbose "query" logging is
// enabled whenever NODE_ENV is NOT "development" (i.e. in production), while
// development gets errors only. Confirm intent before changing.
const prisma = prismaGlobal.prisma || new import_client.PrismaClient({
  log: import_env.env.NODE_ENV !== "development" ? ["query", "error", "warn"] : ["error"]
});
// Cache the client outside production so module re-evaluation reuses it.
if (import_env.env.NODE_ENV !== "production") {
  prismaGlobal.prisma = prisma;
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
prisma
|
||||
});
|
||||
48
apps/trpc-experimental/server/build/scheduler.js
Normal file
48
apps/trpc-experimental/server/build/scheduler.js
Normal file
@@ -0,0 +1,48 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var scheduler_exports = {};
|
||||
__export(scheduler_exports, {
|
||||
isDev: () => isDev,
|
||||
scheduler: () => scheduler
|
||||
});
|
||||
module.exports = __toCommonJS(scheduler_exports);
|
||||
var import_bree = __toESM(require("bree"));
|
||||
var import_path = __toESM(require("path"));
|
||||
var import_ts_worker = __toESM(require("@breejs/ts-worker"));
|
||||
// Bree job scheduler with the TypeScript worker plugin; runs the
// applicationBuildQueue job. In development the job root is redirected to the
// local ./jobs directory next to this file.
const isDev = process.env["NODE_ENV"] === "development";
import_bree.default.extend(import_ts_worker.default);
const options = {
  defaultExtension: "js",
  // Bree's own logging is disabled; jobs do their own logging.
  logger: false,
  jobs: [{ name: "applicationBuildQueue" }]
};
if (isDev)
  options.root = import_path.default.join(__dirname, "./jobs");
const scheduler = new import_bree.default(options);
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
isDev,
|
||||
scheduler
|
||||
});
|
||||
99
apps/trpc-experimental/server/build/server.js
Normal file
99
apps/trpc-experimental/server/build/server.js
Normal file
@@ -0,0 +1,99 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var server_exports = {};
|
||||
__export(server_exports, {
|
||||
createServer: () => createServer
|
||||
});
|
||||
module.exports = __toCommonJS(server_exports);
|
||||
var import_fastify = require("@trpc/server/adapters/fastify");
|
||||
var import_fastify2 = __toESM(require("fastify"));
|
||||
var import_trpc = require("./trpc");
|
||||
var import_context = require("./trpc/context");
|
||||
var import_cors = __toESM(require("@fastify/cors"));
|
||||
var path = __toESM(require("node:path"));
|
||||
var import_static = __toESM(require("@fastify/static"));
|
||||
var import_autoload = __toESM(require("@fastify/autoload"));
|
||||
var import_graceful = __toESM(require("@ladjs/graceful"));
|
||||
var import_scheduler = require("./scheduler");
|
||||
const isDev = process.env["NODE_ENV"] === "development";
|
||||
/**
 * Builds the Fastify server hosting the tRPC API, the REST /api routes, and
 * (outside development) the static SPA bundle with an index.html fallback.
 *
 * @param {object} opts
 * @param {boolean} [opts.dev=true] - enables Fastify's request logger.
 * @param {number} [opts.port=3000] - listen port.
 * @param {string} [opts.prefix='/trpc'] - mount prefix for the tRPC plugin.
 * @returns {{ server: object, start: () => Promise<void>, stop: () => void }}
 *   the Fastify instance plus start/stop controls; `start` exits the process
 *   on a listen failure.
 */
function createServer(opts) {
  const dev = opts.dev ?? true;
  const port = opts.port ?? 3e3;
  const prefix = opts.prefix ?? "/trpc";
  // trustProxy: the server is expected to sit behind a reverse proxy.
  const server = (0, import_fastify2.default)({ logger: dev, trustProxy: true });
  server.register(import_cors.default);
  server.register(import_fastify.fastifyTRPCPlugin, {
    prefix,
    trpcOptions: {
      router: import_trpc.appRouter,
      createContext: import_context.createContext,
      onError({ error, type, path: path2, input, ctx, req }) {
        console.error("Error:", error);
        // NOTE(review): empty branch — presumably a placeholder for error
        // reporting on internal errors; currently does nothing.
        if (error.code === "INTERNAL_SERVER_ERROR") {
        }
      }
    }
  });
  if (!isDev) {
    // Production only: serve the pre-built, pre-compressed SPA assets.
    server.register(import_static.default, {
      root: path.join(__dirname, "./public"),
      preCompressed: true
    });
    // SPA fallback: unknown /api routes 404 as JSON, everything else gets
    // index.html so client-side routing can take over.
    server.setNotFoundHandler(async function(request, reply) {
      if (request.raw.url && request.raw.url.startsWith("/api")) {
        return reply.status(404).send({
          success: false
        });
      }
      return reply.status(200).sendFile("index.html");
    });
  }
  // Auto-load REST route modules from ./api under the /api prefix.
  server.register(import_autoload.default, {
    dir: path.join(__dirname, "api"),
    options: { prefix: "/api" }
  });
  const stop = () => server.close();
  const start = async () => {
    try {
      await server.listen({ host: "0.0.0.0", port });
      console.log("Coolify server is listening on port", port, "at 0.0.0.0 \u{1F680}");
      // Graceful shutdown also stops the Bree scheduler.
      const graceful = new import_graceful.default({ brees: [import_scheduler.scheduler] });
      graceful.listen();
      // Keep the build-queue worker alive: restart it every 2s if it exited.
      setInterval(async () => {
        if (!import_scheduler.scheduler.workers.has("applicationBuildQueue")) {
          import_scheduler.scheduler.run("applicationBuildQueue");
        }
      }, 2e3);
    } catch (err) {
      server.log.error(err);
      process.exit(1);
    }
  };
  return { server, start, stop };
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
createServer
|
||||
});
|
||||
43
apps/trpc-experimental/server/build/trpc/context.js
Normal file
43
apps/trpc-experimental/server/build/trpc/context.js
Normal file
@@ -0,0 +1,43 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var context_exports = {};
|
||||
__export(context_exports, {
|
||||
createContext: () => createContext
|
||||
});
|
||||
module.exports = __toCommonJS(context_exports);
|
||||
var import_jsonwebtoken = __toESM(require("jsonwebtoken"));
|
||||
var import_env = require("../env");
|
||||
function createContext({ req }) {
|
||||
const token = req.headers.authorization;
|
||||
let user = null;
|
||||
if (token) {
|
||||
user = import_jsonwebtoken.default.verify(token, import_env.env.COOLIFY_SECRET_KEY);
|
||||
}
|
||||
return { user, hostname: req.hostname };
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
createContext
|
||||
});
|
||||
39
apps/trpc-experimental/server/build/trpc/index.js
Normal file
39
apps/trpc-experimental/server/build/trpc/index.js
Normal file
@@ -0,0 +1,39 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var trpc_exports = {};
|
||||
__export(trpc_exports, {
|
||||
appRouter: () => appRouter
|
||||
});
|
||||
module.exports = __toCommonJS(trpc_exports);
|
||||
var import_trpc = require("./trpc");
|
||||
var import_routers = require("./routers");
|
||||
const appRouter = (0, import_trpc.router)({
|
||||
settings: import_routers.settingsRouter,
|
||||
auth: import_routers.authRouter,
|
||||
dashboard: import_routers.dashboardRouter,
|
||||
applications: import_routers.applicationsRouter,
|
||||
services: import_routers.servicesRouter,
|
||||
databases: import_routers.databasesRouter,
|
||||
sources: import_routers.sourcesRouter,
|
||||
destinations: import_routers.destinationsRouter
|
||||
});
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
appRouter
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,526 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var lib_exports = {};
|
||||
__export(lib_exports, {
|
||||
deployApplication: () => deployApplication,
|
||||
generateConfigHash: () => generateConfigHash,
|
||||
getApplicationFromDB: () => getApplicationFromDB,
|
||||
setDefaultBaseImage: () => setDefaultBaseImage
|
||||
});
|
||||
module.exports = __toCommonJS(lib_exports);
|
||||
var import_cuid = __toESM(require("cuid"));
|
||||
var import_node_crypto = __toESM(require("node:crypto"));
|
||||
var import_common = require("../../../lib/common");
|
||||
var import_prisma = require("../../../prisma");
|
||||
async function deployApplication(id, teamId, forceRebuild, pullmergeRequestId = null, branch = null) {
|
||||
const buildId = (0, import_cuid.default)();
|
||||
const application = await getApplicationFromDB(id, teamId);
|
||||
if (application) {
|
||||
if (!application?.configHash) {
|
||||
await generateConfigHash(
|
||||
id,
|
||||
application.buildPack,
|
||||
application.port,
|
||||
application.exposePort,
|
||||
application.installCommand,
|
||||
application.buildCommand,
|
||||
application.startCommand
|
||||
);
|
||||
}
|
||||
await import_prisma.prisma.application.update({ where: { id }, data: { updatedAt: new Date() } });
|
||||
if (application.gitSourceId) {
|
||||
await import_prisma.prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
applicationId: id,
|
||||
sourceBranch: branch,
|
||||
branch: application.branch,
|
||||
pullmergeRequestId: pullmergeRequestId?.toString(),
|
||||
forceRebuild,
|
||||
destinationDockerId: application.destinationDocker?.id,
|
||||
gitSourceId: application.gitSource?.id,
|
||||
githubAppId: application.gitSource?.githubApp?.id,
|
||||
gitlabAppId: application.gitSource?.gitlabApp?.id,
|
||||
status: "queued",
|
||||
type: pullmergeRequestId ? application.gitSource?.githubApp?.id ? "manual_pr" : "manual_mr" : "manual"
|
||||
}
|
||||
});
|
||||
} else {
|
||||
await import_prisma.prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
applicationId: id,
|
||||
branch: "latest",
|
||||
forceRebuild,
|
||||
destinationDockerId: application.destinationDocker?.id,
|
||||
status: "queued",
|
||||
type: "manual"
|
||||
}
|
||||
});
|
||||
}
|
||||
return buildId;
|
||||
}
|
||||
throw { status: 500, message: "Application cannot be deployed." };
|
||||
}
|
||||
async function generateConfigHash(id, buildPack, port, exposePort, installCommand, buildCommand, startCommand) {
|
||||
const configHash = import_node_crypto.default.createHash("sha256").update(
|
||||
JSON.stringify({
|
||||
buildPack,
|
||||
port,
|
||||
exposePort,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand
|
||||
})
|
||||
).digest("hex");
|
||||
return await import_prisma.prisma.application.update({ where: { id }, data: { configHash } });
|
||||
}
|
||||
async function getApplicationFromDB(id, teamId) {
|
||||
let application = await import_prisma.prisma.application.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: {
|
||||
destinationDocker: true,
|
||||
settings: true,
|
||||
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||
secrets: true,
|
||||
persistentStorage: true,
|
||||
connectedDatabase: true,
|
||||
previewApplication: true,
|
||||
dockerRegistry: true
|
||||
}
|
||||
});
|
||||
if (!application) {
|
||||
throw { status: 404, message: "Application not found." };
|
||||
}
|
||||
application = decryptApplication(application);
|
||||
const buildPack = application?.buildPack || null;
|
||||
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(buildPack);
|
||||
if (application && !application.baseImage) {
|
||||
application.baseImage = baseImage;
|
||||
}
|
||||
if (application && !application.baseBuildImage) {
|
||||
application.baseBuildImage = baseBuildImage;
|
||||
}
|
||||
return { ...application, baseBuildImages, baseImages };
|
||||
}
|
||||
function decryptApplication(application) {
|
||||
if (application) {
|
||||
if (application?.gitSource?.githubApp?.clientSecret) {
|
||||
application.gitSource.githubApp.clientSecret = (0, import_common.decrypt)(application.gitSource.githubApp.clientSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.webhookSecret) {
|
||||
application.gitSource.githubApp.webhookSecret = (0, import_common.decrypt)(application.gitSource.githubApp.webhookSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.privateKey) {
|
||||
application.gitSource.githubApp.privateKey = (0, import_common.decrypt)(application.gitSource.githubApp.privateKey) || null;
|
||||
}
|
||||
if (application?.gitSource?.gitlabApp?.appSecret) {
|
||||
application.gitSource.gitlabApp.appSecret = (0, import_common.decrypt)(application.gitSource.gitlabApp.appSecret) || null;
|
||||
}
|
||||
if (application?.secrets.length > 0) {
|
||||
application.secrets = application.secrets.map((s) => {
|
||||
s.value = (0, import_common.decrypt)(s.value) || null;
|
||||
return s;
|
||||
});
|
||||
}
|
||||
return application;
|
||||
}
|
||||
}
|
||||
const staticApps = ["static", "react", "vuejs", "svelte", "gatsby", "astro", "eleventy"];
|
||||
const nodeBased = [
|
||||
"react",
|
||||
"preact",
|
||||
"vuejs",
|
||||
"svelte",
|
||||
"gatsby",
|
||||
"astro",
|
||||
"eleventy",
|
||||
"node",
|
||||
"nestjs",
|
||||
"nuxtjs",
|
||||
"nextjs"
|
||||
];
|
||||
function setDefaultBaseImage(buildPack, deploymentType = null) {
|
||||
const nodeVersions = [
|
||||
{
|
||||
value: "node:lts",
|
||||
label: "node:lts"
|
||||
},
|
||||
{
|
||||
value: "node:18",
|
||||
label: "node:18"
|
||||
},
|
||||
{
|
||||
value: "node:17",
|
||||
label: "node:17"
|
||||
},
|
||||
{
|
||||
value: "node:16",
|
||||
label: "node:16"
|
||||
},
|
||||
{
|
||||
value: "node:14",
|
||||
label: "node:14"
|
||||
},
|
||||
{
|
||||
value: "node:12",
|
||||
label: "node:12"
|
||||
}
|
||||
];
|
||||
const staticVersions = [
|
||||
{
|
||||
value: "webdevops/nginx:alpine",
|
||||
label: "webdevops/nginx:alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/apache:alpine",
|
||||
label: "webdevops/apache:alpine"
|
||||
},
|
||||
{
|
||||
value: "nginx:alpine",
|
||||
label: "nginx:alpine"
|
||||
},
|
||||
{
|
||||
value: "httpd:alpine",
|
||||
label: "httpd:alpine (Apache)"
|
||||
}
|
||||
];
|
||||
const rustVersions = [
|
||||
{
|
||||
value: "rust:latest",
|
||||
label: "rust:latest"
|
||||
},
|
||||
{
|
||||
value: "rust:1.60",
|
||||
label: "rust:1.60"
|
||||
},
|
||||
{
|
||||
value: "rust:1.60-buster",
|
||||
label: "rust:1.60-buster"
|
||||
},
|
||||
{
|
||||
value: "rust:1.60-bullseye",
|
||||
label: "rust:1.60-bullseye"
|
||||
},
|
||||
{
|
||||
value: "rust:1.60-slim-buster",
|
||||
label: "rust:1.60-slim-buster"
|
||||
},
|
||||
{
|
||||
value: "rust:1.60-slim-bullseye",
|
||||
label: "rust:1.60-slim-bullseye"
|
||||
},
|
||||
{
|
||||
value: "rust:1.60-alpine3.14",
|
||||
label: "rust:1.60-alpine3.14"
|
||||
},
|
||||
{
|
||||
value: "rust:1.60-alpine3.15",
|
||||
label: "rust:1.60-alpine3.15"
|
||||
}
|
||||
];
|
||||
const phpVersions = [
|
||||
{
|
||||
value: "webdevops/php-apache:8.2",
|
||||
label: "webdevops/php-apache:8.2"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:8.2",
|
||||
label: "webdevops/php-nginx:8.2"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:8.1",
|
||||
label: "webdevops/php-apache:8.1"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:8.1",
|
||||
label: "webdevops/php-nginx:8.1"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:8.0",
|
||||
label: "webdevops/php-apache:8.0"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:8.0",
|
||||
label: "webdevops/php-nginx:8.0"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.4",
|
||||
label: "webdevops/php-apache:7.4"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.4",
|
||||
label: "webdevops/php-nginx:7.4"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.3",
|
||||
label: "webdevops/php-apache:7.3"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.3",
|
||||
label: "webdevops/php-nginx:7.3"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.2",
|
||||
label: "webdevops/php-apache:7.2"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.2",
|
||||
label: "webdevops/php-nginx:7.2"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.1",
|
||||
label: "webdevops/php-apache:7.1"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.1",
|
||||
label: "webdevops/php-nginx:7.1"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.0",
|
||||
label: "webdevops/php-apache:7.0"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.0",
|
||||
label: "webdevops/php-nginx:7.0"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:5.6",
|
||||
label: "webdevops/php-apache:5.6"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:5.6",
|
||||
label: "webdevops/php-nginx:5.6"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:8.2-alpine",
|
||||
label: "webdevops/php-apache:8.2-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:8.2-alpine",
|
||||
label: "webdevops/php-nginx:8.2-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:8.1-alpine",
|
||||
label: "webdevops/php-apache:8.1-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:8.1-alpine",
|
||||
label: "webdevops/php-nginx:8.1-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:8.0-alpine",
|
||||
label: "webdevops/php-apache:8.0-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:8.0-alpine",
|
||||
label: "webdevops/php-nginx:8.0-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.4-alpine",
|
||||
label: "webdevops/php-apache:7.4-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.4-alpine",
|
||||
label: "webdevops/php-nginx:7.4-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.3-alpine",
|
||||
label: "webdevops/php-apache:7.3-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.3-alpine",
|
||||
label: "webdevops/php-nginx:7.3-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.2-alpine",
|
||||
label: "webdevops/php-apache:7.2-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-nginx:7.2-alpine",
|
||||
label: "webdevops/php-nginx:7.2-alpine"
|
||||
},
|
||||
{
|
||||
value: "webdevops/php-apache:7.1-alpine",
|
||||
label: "webdevops/php-apache:7.1-alpine"
|
||||
},
|
||||
{
|
||||
value: "php:8.1-fpm",
|
||||
label: "php:8.1-fpm"
|
||||
},
|
||||
{
|
||||
value: "php:8.0-fpm",
|
||||
label: "php:8.0-fpm"
|
||||
},
|
||||
{
|
||||
value: "php:8.1-fpm-alpine",
|
||||
label: "php:8.1-fpm-alpine"
|
||||
},
|
||||
{
|
||||
value: "php:8.0-fpm-alpine",
|
||||
label: "php:8.0-fpm-alpine"
|
||||
}
|
||||
];
|
||||
const pythonVersions = [
|
||||
{
|
||||
value: "python:3.10-alpine",
|
||||
label: "python:3.10-alpine"
|
||||
},
|
||||
{
|
||||
value: "python:3.10-buster",
|
||||
label: "python:3.10-buster"
|
||||
},
|
||||
{
|
||||
value: "python:3.10-bullseye",
|
||||
label: "python:3.10-bullseye"
|
||||
},
|
||||
{
|
||||
value: "python:3.10-slim-bullseye",
|
||||
label: "python:3.10-slim-bullseye"
|
||||
},
|
||||
{
|
||||
value: "python:3.9-alpine",
|
||||
label: "python:3.9-alpine"
|
||||
},
|
||||
{
|
||||
value: "python:3.9-buster",
|
||||
label: "python:3.9-buster"
|
||||
},
|
||||
{
|
||||
value: "python:3.9-bullseye",
|
||||
label: "python:3.9-bullseye"
|
||||
},
|
||||
{
|
||||
value: "python:3.9-slim-bullseye",
|
||||
label: "python:3.9-slim-bullseye"
|
||||
},
|
||||
{
|
||||
value: "python:3.8-alpine",
|
||||
label: "python:3.8-alpine"
|
||||
},
|
||||
{
|
||||
value: "python:3.8-buster",
|
||||
label: "python:3.8-buster"
|
||||
},
|
||||
{
|
||||
value: "python:3.8-bullseye",
|
||||
label: "python:3.8-bullseye"
|
||||
},
|
||||
{
|
||||
value: "python:3.8-slim-bullseye",
|
||||
label: "python:3.8-slim-bullseye"
|
||||
},
|
||||
{
|
||||
value: "python:3.7-alpine",
|
||||
label: "python:3.7-alpine"
|
||||
},
|
||||
{
|
||||
value: "python:3.7-buster",
|
||||
label: "python:3.7-buster"
|
||||
},
|
||||
{
|
||||
value: "python:3.7-bullseye",
|
||||
label: "python:3.7-bullseye"
|
||||
},
|
||||
{
|
||||
value: "python:3.7-slim-bullseye",
|
||||
label: "python:3.7-slim-bullseye"
|
||||
}
|
||||
];
|
||||
const herokuVersions = [
|
||||
{
|
||||
value: "heroku/builder:22",
|
||||
label: "heroku/builder:22"
|
||||
},
|
||||
{
|
||||
value: "heroku/buildpacks:20",
|
||||
label: "heroku/buildpacks:20"
|
||||
},
|
||||
{
|
||||
value: "heroku/builder-classic:22",
|
||||
label: "heroku/builder-classic:22"
|
||||
}
|
||||
];
|
||||
let payload = {
|
||||
baseImage: null,
|
||||
baseBuildImage: null,
|
||||
baseImages: [],
|
||||
baseBuildImages: []
|
||||
};
|
||||
if (nodeBased.includes(buildPack)) {
|
||||
if (deploymentType === "static") {
|
||||
payload.baseImage = (0, import_common.isARM)(process.arch) ? "nginx:alpine" : "webdevops/nginx:alpine";
|
||||
payload.baseImages = (0, import_common.isARM)(process.arch) ? staticVersions.filter((version) => !version.value.includes("webdevops")) : staticVersions;
|
||||
payload.baseBuildImage = "node:lts";
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
} else {
|
||||
payload.baseImage = "node:lts";
|
||||
payload.baseImages = nodeVersions;
|
||||
payload.baseBuildImage = "node:lts";
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
}
|
||||
}
|
||||
if (staticApps.includes(buildPack)) {
|
||||
payload.baseImage = (0, import_common.isARM)(process.arch) ? "nginx:alpine" : "webdevops/nginx:alpine";
|
||||
payload.baseImages = (0, import_common.isARM)(process.arch) ? staticVersions.filter((version) => !version.value.includes("webdevops")) : staticVersions;
|
||||
payload.baseBuildImage = "node:lts";
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
}
|
||||
if (buildPack === "python") {
|
||||
payload.baseImage = "python:3.10-alpine";
|
||||
payload.baseImages = pythonVersions;
|
||||
}
|
||||
if (buildPack === "rust") {
|
||||
payload.baseImage = "rust:latest";
|
||||
payload.baseBuildImage = "rust:latest";
|
||||
payload.baseImages = rustVersions;
|
||||
payload.baseBuildImages = rustVersions;
|
||||
}
|
||||
if (buildPack === "deno") {
|
||||
payload.baseImage = "denoland/deno:latest";
|
||||
}
|
||||
if (buildPack === "php") {
|
||||
payload.baseImage = (0, import_common.isARM)(process.arch) ? "php:8.1-fpm-alpine" : "webdevops/php-apache:8.2-alpine";
|
||||
payload.baseImages = (0, import_common.isARM)(process.arch) ? phpVersions.filter((version) => !version.value.includes("webdevops")) : phpVersions;
|
||||
}
|
||||
if (buildPack === "laravel") {
|
||||
payload.baseImage = (0, import_common.isARM)(process.arch) ? "php:8.1-fpm-alpine" : "webdevops/php-apache:8.2-alpine";
|
||||
payload.baseImages = (0, import_common.isARM)(process.arch) ? phpVersions.filter((version) => !version.value.includes("webdevops")) : phpVersions;
|
||||
payload.baseBuildImage = "node:18";
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
}
|
||||
if (buildPack === "heroku") {
|
||||
payload.baseImage = "heroku/buildpacks:20";
|
||||
payload.baseImages = herokuVersions;
|
||||
}
|
||||
return payload;
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
deployApplication,
|
||||
generateConfigHash,
|
||||
getApplicationFromDB,
|
||||
setDefaultBaseImage
|
||||
});
|
||||
201
apps/trpc-experimental/server/build/trpc/routers/auth.js
Normal file
201
apps/trpc-experimental/server/build/trpc/routers/auth.js
Normal file
@@ -0,0 +1,201 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var auth_exports = {};
|
||||
__export(auth_exports, {
|
||||
authRouter: () => authRouter
|
||||
});
|
||||
module.exports = __toCommonJS(auth_exports);
|
||||
var import_zod = require("zod");
|
||||
var import_trpc = require("../trpc");
|
||||
var import_server = require("@trpc/server");
|
||||
var import_common = require("../../lib/common");
|
||||
var import_env = require("../../env");
|
||||
var import_jsonwebtoken = __toESM(require("jsonwebtoken"));
|
||||
var import_prisma = require("../../prisma");
|
||||
var import_cuid = __toESM(require("cuid"));
|
||||
const authRouter = (0, import_trpc.router)({
|
||||
register: import_trpc.publicProcedure.input(
|
||||
import_zod.z.object({
|
||||
email: import_zod.z.string(),
|
||||
password: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input }) => {
|
||||
const { email, password } = input;
|
||||
const userFound = await import_prisma.prisma.user.findUnique({
|
||||
where: { email },
|
||||
include: { teams: true, permission: true }
|
||||
});
|
||||
if (userFound) {
|
||||
throw new import_server.TRPCError({
|
||||
code: "BAD_REQUEST",
|
||||
message: "User already exists."
|
||||
});
|
||||
}
|
||||
const settings = await (0, import_common.listSettings)();
|
||||
if (!settings?.isRegistrationEnabled) {
|
||||
throw new import_server.TRPCError({
|
||||
code: "FORBIDDEN",
|
||||
message: "Registration is disabled."
|
||||
});
|
||||
}
|
||||
const usersCount = await import_prisma.prisma.user.count();
|
||||
const uid = usersCount === 0 ? "0" : (0, import_cuid.default)();
|
||||
const permission = "owner";
|
||||
const isAdmin = true;
|
||||
const hashedPassword = await (0, import_common.hashPassword)(password);
|
||||
if (usersCount === 0) {
|
||||
await import_prisma.prisma.user.create({
|
||||
data: {
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: "email",
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: (0, import_common.uniqueName)(),
|
||||
destinationDocker: { connect: { network: "coolify" } }
|
||||
}
|
||||
},
|
||||
permission: { create: { teamId: uid, permission } }
|
||||
},
|
||||
include: { teams: true }
|
||||
});
|
||||
await import_prisma.prisma.setting.update({
|
||||
where: { id: "0" },
|
||||
data: { isRegistrationEnabled: false }
|
||||
});
|
||||
} else {
|
||||
await import_prisma.prisma.user.create({
|
||||
data: {
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: "email",
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: (0, import_common.uniqueName)()
|
||||
}
|
||||
},
|
||||
permission: { create: { teamId: uid, permission } }
|
||||
},
|
||||
include: { teams: true }
|
||||
});
|
||||
}
|
||||
const payload = {
|
||||
userId: uid,
|
||||
teamId: uid,
|
||||
permission,
|
||||
isAdmin
|
||||
};
|
||||
return {
|
||||
...payload,
|
||||
token: import_jsonwebtoken.default.sign(payload, import_env.env.COOLIFY_SECRET_KEY)
|
||||
};
|
||||
}),
|
||||
login: import_trpc.publicProcedure.input(
|
||||
import_zod.z.object({
|
||||
email: import_zod.z.string(),
|
||||
password: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input }) => {
|
||||
const { email, password } = input;
|
||||
const userFound = await import_prisma.prisma.user.findUnique({
|
||||
where: { email },
|
||||
include: { teams: true, permission: true }
|
||||
});
|
||||
if (!userFound) {
|
||||
throw new import_server.TRPCError({
|
||||
code: "BAD_REQUEST",
|
||||
message: "User already exists."
|
||||
});
|
||||
}
|
||||
if (userFound.type === "email") {
|
||||
if (userFound.password === "RESETME") {
|
||||
const hashedPassword = await (0, import_common.hashPassword)(password);
|
||||
if (userFound.updatedAt < new Date(Date.now() - 1e3 * 60 * 10)) {
|
||||
if (userFound.id === "0") {
|
||||
await import_prisma.prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: "RESETME" }
|
||||
});
|
||||
} else {
|
||||
await import_prisma.prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: "RESETTIMEOUT" }
|
||||
});
|
||||
}
|
||||
} else {
|
||||
await import_prisma.prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: hashedPassword }
|
||||
});
|
||||
const payload2 = {
|
||||
userId: userFound.id,
|
||||
teamId: userFound.id,
|
||||
permission: userFound.permission,
|
||||
isAdmin: true
|
||||
};
|
||||
return {
|
||||
...payload2,
|
||||
token: import_jsonwebtoken.default.sign(payload2, import_env.env.COOLIFY_SECRET_KEY)
|
||||
};
|
||||
}
|
||||
}
|
||||
if (!userFound.password) {
|
||||
throw new import_server.TRPCError({
|
||||
code: "BAD_REQUEST",
|
||||
message: "Something went wrong. Please try again later."
|
||||
});
|
||||
}
|
||||
const passwordMatch = (0, import_common.comparePassword)(password, userFound.password);
|
||||
if (!passwordMatch) {
|
||||
throw new import_server.TRPCError({
|
||||
code: "BAD_REQUEST",
|
||||
message: "Incorrect password."
|
||||
});
|
||||
}
|
||||
const payload = {
|
||||
userId: userFound.id,
|
||||
teamId: userFound.id,
|
||||
permission: userFound.permission,
|
||||
isAdmin: true
|
||||
};
|
||||
return {
|
||||
...payload,
|
||||
token: import_jsonwebtoken.default.sign(payload, import_env.env.COOLIFY_SECRET_KEY)
|
||||
};
|
||||
}
|
||||
throw new import_server.TRPCError({
|
||||
code: "BAD_REQUEST",
|
||||
message: "Not implemented yet."
|
||||
});
|
||||
})
|
||||
});
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
authRouter
|
||||
});
|
||||
@@ -0,0 +1,87 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var dashboard_exports = {};
|
||||
__export(dashboard_exports, {
|
||||
dashboardRouter: () => dashboardRouter
|
||||
});
|
||||
module.exports = __toCommonJS(dashboard_exports);
|
||||
var import_trpc = require("../trpc");
|
||||
var import_common = require("../../lib/common");
|
||||
var import_prisma = require("../../prisma");
|
||||
const dashboardRouter = (0, import_trpc.router)({
|
||||
resources: import_trpc.privateProcedure.query(async ({ ctx }) => {
|
||||
const id = ctx.user?.teamId === "0" ? void 0 : ctx.user?.teamId;
|
||||
let applications = await import_prisma.prisma.application.findMany({
|
||||
where: { teams: { some: { id } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true }
|
||||
});
|
||||
const databases = await import_prisma.prisma.database.findMany({
|
||||
where: { teams: { some: { id } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true }
|
||||
});
|
||||
const services = await import_prisma.prisma.service.findMany({
|
||||
where: { teams: { some: { id } } },
|
||||
include: { destinationDocker: true, teams: true }
|
||||
});
|
||||
const gitSources = await import_prisma.prisma.gitSource.findMany({
|
||||
where: {
|
||||
OR: [{ teams: { some: { id } } }, { isSystemWide: true }]
|
||||
},
|
||||
include: { teams: true }
|
||||
});
|
||||
const destinations = await import_prisma.prisma.destinationDocker.findMany({
|
||||
where: { teams: { some: { id } } },
|
||||
include: { teams: true }
|
||||
});
|
||||
const settings = await (0, import_common.listSettings)();
|
||||
let foundUnconfiguredApplication = false;
|
||||
for (const application of applications) {
|
||||
if ((!application.buildPack || !application.branch) && !application.simpleDockerfile || !application.destinationDockerId || !application.settings?.isBot && !application?.fqdn && application.buildPack !== "compose") {
|
||||
foundUnconfiguredApplication = true;
|
||||
}
|
||||
}
|
||||
let foundUnconfiguredService = false;
|
||||
for (const service of services) {
|
||||
if (!service.fqdn) {
|
||||
foundUnconfiguredService = true;
|
||||
}
|
||||
}
|
||||
let foundUnconfiguredDatabase = false;
|
||||
for (const database of databases) {
|
||||
if (!database.version) {
|
||||
foundUnconfiguredDatabase = true;
|
||||
}
|
||||
}
|
||||
return {
|
||||
foundUnconfiguredApplication,
|
||||
foundUnconfiguredDatabase,
|
||||
foundUnconfiguredService,
|
||||
applications,
|
||||
databases,
|
||||
services,
|
||||
gitSources,
|
||||
destinations,
|
||||
settings
|
||||
};
|
||||
})
|
||||
});
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
dashboardRouter
|
||||
});
|
||||
@@ -0,0 +1,384 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var databases_exports = {};
|
||||
__export(databases_exports, {
|
||||
databasesRouter: () => databasesRouter
|
||||
});
|
||||
module.exports = __toCommonJS(databases_exports);
|
||||
var import_zod = require("zod");
|
||||
var import_promises = __toESM(require("fs/promises"));
|
||||
var import_trpc = require("../../trpc");
|
||||
var import_common = require("../../../lib/common");
|
||||
var import_prisma = require("../../../prisma");
|
||||
var import_executeCommand = require("../../../lib/executeCommand");
|
||||
var import_docker = require("../../../lib/docker");
|
||||
var import_lib = require("./lib");
|
||||
var import_js_yaml = __toESM(require("js-yaml"));
|
||||
var import_lib2 = require("../services/lib");
|
||||
const databasesRouter = (0, import_trpc.router)({
|
||||
usage: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).query(async ({ ctx, input }) => {
|
||||
const teamId = ctx.user?.teamId;
|
||||
const { id } = input;
|
||||
let usage = {};
|
||||
const database = await import_prisma.prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword)
|
||||
database.dbUserPassword = (0, import_common.decrypt)(database.dbUserPassword);
|
||||
if (database.rootUserPassword)
|
||||
database.rootUserPassword = (0, import_common.decrypt)(database.rootUserPassword);
|
||||
if (database.destinationDockerId) {
|
||||
[usage] = await Promise.all([(0, import_common.getContainerUsage)(database.destinationDocker.id, id)]);
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
usage
|
||||
}
|
||||
};
|
||||
}),
|
||||
save: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ ctx, input }) => {
|
||||
const teamId = ctx.user?.teamId;
|
||||
const {
|
||||
id,
|
||||
name,
|
||||
defaultDatabase,
|
||||
dbUser,
|
||||
dbUserPassword,
|
||||
rootUser,
|
||||
rootUserPassword,
|
||||
version,
|
||||
isRunning
|
||||
} = input;
|
||||
const database = await import_prisma.prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword)
|
||||
database.dbUserPassword = (0, import_common.decrypt)(database.dbUserPassword);
|
||||
if (database.rootUserPassword)
|
||||
database.rootUserPassword = (0, import_common.decrypt)(database.rootUserPassword);
|
||||
if (isRunning) {
|
||||
if (database.dbUserPassword !== dbUserPassword) {
|
||||
await (0, import_lib.updatePasswordInDb)(database, dbUser, dbUserPassword, false);
|
||||
} else if (database.rootUserPassword !== rootUserPassword) {
|
||||
await (0, import_lib.updatePasswordInDb)(database, rootUser, rootUserPassword, true);
|
||||
}
|
||||
}
|
||||
const encryptedDbUserPassword = dbUserPassword && (0, import_common.encrypt)(dbUserPassword);
|
||||
const encryptedRootUserPassword = rootUserPassword && (0, import_common.encrypt)(rootUserPassword);
|
||||
await import_prisma.prisma.database.update({
|
||||
where: { id },
|
||||
data: {
|
||||
name,
|
||||
defaultDatabase,
|
||||
dbUser,
|
||||
dbUserPassword: encryptedDbUserPassword,
|
||||
rootUser,
|
||||
rootUserPassword: encryptedRootUserPassword,
|
||||
version
|
||||
}
|
||||
});
|
||||
}),
|
||||
// Toggles public exposure (and redis append-only mode) for a database.
// Order matters: allocate a free public port first, persist the settings,
// then start/stop the Traefik TCP proxy to match the new state.
saveSettings: import_trpc.privateProcedure.input(
  import_zod.z.object({
    id: import_zod.z.string(),
    isPublic: import_zod.z.boolean(),
    appendOnly: import_zod.z.boolean().default(true)
  })
).mutation(async ({ ctx, input }) => {
  const teamId = ctx.user?.teamId;
  const { id, isPublic, appendOnly = true } = input;
  let publicPort = null;
  const {
    destinationDocker: { remoteEngine, engine, remoteIpAddress }
  } = await import_prisma.prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } });
  if (isPublic) {
    // Reserve a port on the destination host before anything is persisted.
    publicPort = await (0, import_lib2.getFreePublicPort)({ id, remoteEngine, engine, remoteIpAddress });
  }
  await import_prisma.prisma.database.update({
    where: { id },
    data: {
      settings: {
        upsert: { update: { isPublic, appendOnly }, create: { isPublic, appendOnly } }
      }
    }
  });
  // Re-fetch with the team filter applied (team "0" sees everything).
  const database = await import_prisma.prisma.database.findFirst({
    where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
    include: { destinationDocker: true, settings: true }
  });
  const { arch } = await (0, import_common.listSettings)();
  // Decrypt so generateDatabaseConfiguration sees plaintext credentials.
  if (database.dbUserPassword)
    database.dbUserPassword = (0, import_common.decrypt)(database.dbUserPassword);
  if (database.rootUserPassword)
    database.rootUserPassword = (0, import_common.decrypt)(database.rootUserPassword);
  // oldPublicPort is the port that was exposed before this change — needed
  // to tear down the old proxy when going private.
  const { destinationDockerId, destinationDocker, publicPort: oldPublicPort } = database;
  const { privatePort } = (0, import_lib.generateDatabaseConfiguration)(database, arch);
  if (destinationDockerId) {
    if (isPublic) {
      await import_prisma.prisma.database.update({ where: { id }, data: { publicPort } });
      await (0, import_common.startTraefikTCPProxy)(destinationDocker, id, publicPort, privatePort);
    } else {
      await import_prisma.prisma.database.update({ where: { id }, data: { publicPort: null } });
      await (0, import_docker.stopTcpHttpProxy)(id, destinationDocker, oldPublicPort);
    }
  }
  return { publicPort };
}),
|
||||
saveSecret: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
name: import_zod.z.string(),
|
||||
value: import_zod.z.string(),
|
||||
isNew: import_zod.z.boolean().default(true)
|
||||
})
|
||||
).mutation(async ({ ctx, input }) => {
|
||||
let { id, name, value, isNew } = input;
|
||||
if (isNew) {
|
||||
const found = await import_prisma.prisma.databaseSecret.findFirst({ where: { name, databaseId: id } });
|
||||
if (found) {
|
||||
throw `Secret ${name} already exists.`;
|
||||
} else {
|
||||
value = (0, import_common.encrypt)(value.trim());
|
||||
await import_prisma.prisma.databaseSecret.create({
|
||||
data: { name, value, database: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
} else {
|
||||
value = (0, import_common.encrypt)(value.trim());
|
||||
const found = await import_prisma.prisma.databaseSecret.findFirst({ where: { databaseId: id, name } });
|
||||
if (found) {
|
||||
await import_prisma.prisma.databaseSecret.updateMany({
|
||||
where: { databaseId: id, name },
|
||||
data: { value }
|
||||
});
|
||||
} else {
|
||||
await import_prisma.prisma.databaseSecret.create({
|
||||
data: { name, value, database: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
// Starts a standalone database container: generates a docker-compose file
// from the stored configuration and brings it up on the destination engine,
// then re-exposes the Traefik TCP proxy if the database is public.
start: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).mutation(async ({ ctx, input }) => {
  const { id } = input;
  const teamId = ctx.user?.teamId;
  const database = await import_prisma.prisma.database.findFirst({
    where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
    include: { destinationDocker: true, settings: true, databaseSecret: true }
  });
  const { arch } = await (0, import_common.listSettings)();
  // Passwords are encrypted at rest; decrypt before they are injected into
  // the container environment by generateDatabaseConfiguration.
  if (database.dbUserPassword)
    database.dbUserPassword = (0, import_common.decrypt)(database.dbUserPassword);
  if (database.rootUserPassword)
    database.rootUserPassword = (0, import_common.decrypt)(database.rootUserPassword);
  const {
    type,
    destinationDockerId,
    destinationDocker,
    publicPort,
    settings: { isPublic },
    databaseSecret
  } = database;
  const { privatePort, command, environmentVariables, image, volume, ulimits } = (0, import_lib.generateDatabaseConfiguration)(database, arch);
  const network = destinationDockerId && destinationDocker.network;
  // volume is "<name>:<mountpath>" — the name part is declared separately
  // in the compose `volumes:` section.
  const volumeName = volume.split(":")[0];
  const labels = await (0, import_lib.makeLabelForStandaloneDatabase)({ id, image, volume });
  const { workdir } = await (0, import_common.createDirectories)({ repository: type, buildId: id });
  // User-defined secrets are layered on top of the generated env vars
  // (they can override generated values by name).
  if (databaseSecret.length > 0) {
    databaseSecret.forEach((secret) => {
      environmentVariables[secret.name] = (0, import_common.decrypt)(secret.value);
    });
  }
  const composeFile = {
    version: "3.8",
    services: {
      [id]: {
        container_name: id,
        image,
        command,
        environment: environmentVariables,
        volumes: [volume],
        ulimits,
        labels,
        ...(0, import_docker.defaultComposeConfiguration)(network)
      }
    },
    networks: {
      [network]: {
        external: true
      }
    },
    volumes: {
      [volumeName]: {
        name: volumeName
      }
    }
  };
  const composeFileDestination = `${workdir}/docker-compose.yaml`;
  await import_promises.default.writeFile(composeFileDestination, import_js_yaml.default.dump(composeFile));
  await (0, import_executeCommand.executeCommand)({
    dockerId: destinationDocker.id,
    command: `docker compose -f ${composeFileDestination} up -d`
  });
  // Re-expose through Traefik on the previously assigned public port.
  if (isPublic)
    await (0, import_common.startTraefikTCPProxy)(destinationDocker, id, publicPort, privatePort);
}),
|
||||
stop: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).mutation(async ({ ctx, input }) => {
|
||||
const { id } = input;
|
||||
const teamId = ctx.user?.teamId;
|
||||
const database = await import_prisma.prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword)
|
||||
database.dbUserPassword = (0, import_common.decrypt)(database.dbUserPassword);
|
||||
if (database.rootUserPassword)
|
||||
database.rootUserPassword = (0, import_common.decrypt)(database.rootUserPassword);
|
||||
const everStarted = await (0, import_docker.stopDatabaseContainer)(database);
|
||||
if (everStarted)
|
||||
await (0, import_docker.stopTcpHttpProxy)(id, database.destinationDocker, database.publicPort);
|
||||
await import_prisma.prisma.database.update({
|
||||
where: { id },
|
||||
data: {
|
||||
settings: { upsert: { update: { isPublic: false }, create: { isPublic: false } } }
|
||||
}
|
||||
});
|
||||
await import_prisma.prisma.database.update({ where: { id }, data: { publicPort: null } });
|
||||
}),
|
||||
getDatabaseById: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).query(async ({ ctx, input }) => {
|
||||
const { id } = input;
|
||||
const teamId = ctx.user?.teamId;
|
||||
const database = await import_prisma.prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (!database) {
|
||||
throw { status: 404, message: "Database not found." };
|
||||
}
|
||||
const settings = await (0, import_common.listSettings)();
|
||||
if (database.dbUserPassword)
|
||||
database.dbUserPassword = (0, import_common.decrypt)(database.dbUserPassword);
|
||||
if (database.rootUserPassword)
|
||||
database.rootUserPassword = (0, import_common.decrypt)(database.rootUserPassword);
|
||||
const configuration = (0, import_lib.generateDatabaseConfiguration)(database, settings.arch);
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
privatePort: configuration?.privatePort,
|
||||
database,
|
||||
versions: await (0, import_lib.getDatabaseVersions)(database.type, settings.arch),
|
||||
settings
|
||||
}
|
||||
};
|
||||
}),
|
||||
status: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).query(async ({ ctx, input }) => {
|
||||
const id = input.id;
|
||||
const teamId = ctx.user?.teamId;
|
||||
let isRunning = false;
|
||||
const database = await import_prisma.prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database) {
|
||||
const { destinationDockerId, destinationDocker } = database;
|
||||
if (destinationDockerId) {
|
||||
try {
|
||||
const { stdout } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker inspect --format '{{json .State}}' ${id}`
|
||||
});
|
||||
if (JSON.parse(stdout).Running) {
|
||||
isRunning = true;
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
isRunning
|
||||
}
|
||||
};
|
||||
}),
|
||||
// Garbage-collects half-configured databases: any record without a selected
// version is treated as unconfigured — its container and proxy are stopped
// (when a destination exists) and the record plus its settings and secrets
// are deleted.
cleanup: import_trpc.privateProcedure.query(async ({ ctx }) => {
  const teamId = ctx.user?.teamId;
  let databases = await import_prisma.prisma.database.findMany({
    where: { teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
    include: { settings: true, destinationDocker: true, teams: true }
  });
  for (const database of databases) {
    if (!database?.version) {
      const { id } = database;
      if (database.destinationDockerId) {
        const everStarted = await (0, import_docker.stopDatabaseContainer)(database);
        if (everStarted)
          await (0, import_docker.stopTcpHttpProxy)(id, database.destinationDocker, database.publicPort);
      }
      // Child rows first, then the database row itself.
      await import_prisma.prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
      await import_prisma.prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
      await import_prisma.prisma.database.delete({ where: { id } });
    }
  }
  return {};
}),
|
||||
// Deletes a database and its settings/secrets. Unless `force` is set, the
// running container and its TCP proxy are stopped first; `force` skips the
// docker teardown (useful when the destination is unreachable).
delete: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string(), force: import_zod.z.boolean().default(false) })).mutation(async ({ ctx, input }) => {
  const { id, force } = input;
  const teamId = ctx.user?.teamId;
  const database = await import_prisma.prisma.database.findFirst({
    where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
    include: { destinationDocker: true, settings: true }
  });
  if (!force) {
    // Decrypt in place: the database object is handed to stopDatabaseContainer.
    if (database.dbUserPassword)
      database.dbUserPassword = (0, import_common.decrypt)(database.dbUserPassword);
    if (database.rootUserPassword)
      database.rootUserPassword = (0, import_common.decrypt)(database.rootUserPassword);
    if (database.destinationDockerId) {
      const everStarted = await (0, import_docker.stopDatabaseContainer)(database);
      if (everStarted)
        await (0, import_docker.stopTcpHttpProxy)(id, database.destinationDocker, database.publicPort);
    }
  }
  // Child rows first — NOTE(review): the explicit deleteMany calls suggest
  // no cascade is configured for these relations; confirm against the
  // prisma schema.
  await import_prisma.prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
  await import_prisma.prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
  await import_prisma.prisma.database.delete({ where: { id } });
  return {};
})
|
||||
});
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
databasesRouter
|
||||
});
|
||||
@@ -0,0 +1,316 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var lib_exports = {};
|
||||
__export(lib_exports, {
|
||||
generateDatabaseConfiguration: () => generateDatabaseConfiguration,
|
||||
getDatabaseImage: () => getDatabaseImage,
|
||||
getDatabaseVersions: () => getDatabaseVersions,
|
||||
makeLabelForStandaloneDatabase: () => makeLabelForStandaloneDatabase,
|
||||
supportedDatabaseTypesAndVersions: () => supportedDatabaseTypesAndVersions,
|
||||
updatePasswordInDb: () => updatePasswordInDb
|
||||
});
|
||||
module.exports = __toCommonJS(lib_exports);
|
||||
var import_common = require("../../../lib/common");
|
||||
var import_executeCommand = require("../../../lib/executeCommand");
|
||||
var import_prisma = require("../../../prisma");
|
||||
// Catalog of database engines Coolify can provision as standalone
// containers. Each entry maps a logical type name to its Docker base image
// and the image tags (versions) offered. `baseImageARM`/`versionsARM` are
// used on ARM hosts (see getDatabaseImage/getDatabaseVersions); entries
// without them — edgedb — fall back to the default image and versions on
// every architecture.
const supportedDatabaseTypesAndVersions = [
  {
    name: "mongodb",
    fancyName: "MongoDB",
    baseImage: "bitnami/mongodb",
    baseImageARM: "mongo",
    versions: ["5.0", "4.4", "4.2"],
    versionsARM: ["5.0", "4.4", "4.2"]
  },
  {
    name: "mysql",
    fancyName: "MySQL",
    baseImage: "bitnami/mysql",
    baseImageARM: "mysql",
    versions: ["8.0", "5.7"],
    versionsARM: ["8.0", "5.7"]
  },
  {
    name: "mariadb",
    fancyName: "MariaDB",
    baseImage: "bitnami/mariadb",
    baseImageARM: "mariadb",
    versions: ["10.8", "10.7", "10.6", "10.5", "10.4", "10.3", "10.2"],
    versionsARM: ["10.8", "10.7", "10.6", "10.5", "10.4", "10.3", "10.2"]
  },
  {
    // Bitnami tags use a third version component; the official ARM images
    // do not — hence the differing version lists.
    name: "postgresql",
    fancyName: "PostgreSQL",
    baseImage: "bitnami/postgresql",
    baseImageARM: "postgres",
    versions: ["14.5.0", "13.8.0", "12.12.0", "11.17.0", "10.22.0"],
    versionsARM: ["14.5", "13.8", "12.12", "11.17", "10.22"]
  },
  {
    name: "redis",
    fancyName: "Redis",
    baseImage: "bitnami/redis",
    baseImageARM: "redis",
    versions: ["7.0", "6.2", "6.0", "5.0"],
    versionsARM: ["7.0", "6.2", "6.0", "5.0"]
  },
  {
    name: "couchdb",
    fancyName: "CouchDB",
    baseImage: "bitnami/couchdb",
    baseImageARM: "couchdb",
    versions: ["3.2.2", "3.1.2", "2.3.1"],
    versionsARM: ["3.2.2", "3.1.2", "2.3.1"]
  },
  {
    name: "edgedb",
    fancyName: "EdgeDB",
    baseImage: "edgedb/edgedb",
    versions: ["latest", "2.1", "2.0", "1.4"]
  }
];
|
||||
// Resolve the Docker base image for a database type. ARM hosts get the
// ARM-specific image when the catalog declares one, otherwise the default
// image; unknown types yield "".
function getDatabaseImage(type, arch) {
  const entry = supportedDatabaseTypesAndVersions.find((candidate) => candidate.name === type);
  if (!entry) {
    return "";
  }
  return (0, import_common.isARM)(arch) ? entry.baseImageARM || entry.baseImage : entry.baseImage;
}
|
||||
// Builds the runtime container configuration — private port, environment
// variables, image tag, data volume, ulimits and (redis only) the start
// command — for a standalone database of the given type on the given CPU
// architecture. ARM hosts use the official images instead of the Bitnami
// ones, which expect different env var names and data directories; hence
// the per-branch isARM overrides. Returns null for an unsupported type.
function generateDatabaseConfiguration(database, arch) {
  const { id, dbUser, dbUserPassword, rootUser, rootUserPassword, defaultDatabase, version: version2, type } = database;
  const baseImage = getDatabaseImage(type, arch);
  if (type === "mysql") {
    const configuration = {
      privatePort: 3306,
      environmentVariables: {
        MYSQL_USER: dbUser,
        MYSQL_PASSWORD: dbUserPassword,
        MYSQL_ROOT_PASSWORD: rootUserPassword,
        MYSQL_ROOT_USER: rootUser,
        MYSQL_DATABASE: defaultDatabase
      },
      image: `${baseImage}:${version2}`,
      volume: `${id}-${type}-data:/bitnami/mysql/data`,
      ulimits: {}
    };
    if ((0, import_common.isARM)(arch)) {
      // Official mysql image keeps data under /var/lib/mysql.
      configuration.volume = `${id}-${type}-data:/var/lib/mysql`;
    }
    return configuration;
  } else if (type === "mariadb") {
    const configuration = {
      privatePort: 3306,
      environmentVariables: {
        MARIADB_ROOT_USER: rootUser,
        MARIADB_ROOT_PASSWORD: rootUserPassword,
        MARIADB_USER: dbUser,
        MARIADB_PASSWORD: dbUserPassword,
        MARIADB_DATABASE: defaultDatabase
      },
      image: `${baseImage}:${version2}`,
      volume: `${id}-${type}-data:/bitnami/mariadb`,
      ulimits: {}
    };
    if ((0, import_common.isARM)(arch)) {
      configuration.volume = `${id}-${type}-data:/var/lib/mysql`;
    }
    return configuration;
  } else if (type === "mongodb") {
    const configuration = {
      privatePort: 27017,
      environmentVariables: {
        MONGODB_ROOT_USER: rootUser,
        MONGODB_ROOT_PASSWORD: rootUserPassword
      },
      image: `${baseImage}:${version2}`,
      volume: `${id}-${type}-data:/bitnami/mongodb`,
      ulimits: {}
    };
    if ((0, import_common.isARM)(arch)) {
      // Official mongo image uses MONGO_INITDB_* env vars and /data/db.
      configuration.environmentVariables = {
        MONGO_INITDB_ROOT_USERNAME: rootUser,
        MONGO_INITDB_ROOT_PASSWORD: rootUserPassword
      };
      configuration.volume = `${id}-${type}-data:/data/db`;
    }
    return configuration;
  } else if (type === "postgresql") {
    const configuration = {
      privatePort: 5432,
      environmentVariables: {
        POSTGRESQL_POSTGRES_PASSWORD: rootUserPassword,
        POSTGRESQL_PASSWORD: dbUserPassword,
        POSTGRESQL_USERNAME: dbUser,
        POSTGRESQL_DATABASE: defaultDatabase
      },
      image: `${baseImage}:${version2}`,
      volume: `${id}-${type}-data:/bitnami/postgresql`,
      ulimits: {}
    };
    if ((0, import_common.isARM)(arch)) {
      // Official postgres image: POSTGRES_* env vars, different data dir.
      configuration.volume = `${id}-${type}-data:/var/lib/postgresql`;
      configuration.environmentVariables = {
        POSTGRES_PASSWORD: dbUserPassword,
        POSTGRES_USER: dbUser,
        POSTGRES_DB: defaultDatabase
      };
    }
    return configuration;
  } else if (type === "redis") {
    const {
      settings: { appendOnly }
    } = database;
    const configuration = {
      privatePort: 6379,
      // command stays undefined on x86: the Bitnami image is configured
      // purely via env vars.
      command: void 0,
      environmentVariables: {
        REDIS_PASSWORD: dbUserPassword,
        REDIS_AOF_ENABLED: appendOnly ? "yes" : "no"
      },
      image: `${baseImage}:${version2}`,
      volume: `${id}-${type}-data:/bitnami/redis/data`,
      ulimits: {}
    };
    if ((0, import_common.isARM)(arch)) {
      // Official redis image takes its settings on the command line.
      configuration.volume = `${id}-${type}-data:/data`;
      configuration.command = `/usr/local/bin/redis-server --appendonly ${appendOnly ? "yes" : "no"} --requirepass ${dbUserPassword}`;
    }
    return configuration;
  } else if (type === "couchdb") {
    const configuration = {
      privatePort: 5984,
      environmentVariables: {
        COUCHDB_PASSWORD: dbUserPassword,
        COUCHDB_USER: dbUser
      },
      image: `${baseImage}:${version2}`,
      volume: `${id}-${type}-data:/bitnami/couchdb`,
      ulimits: {}
    };
    if ((0, import_common.isARM)(arch)) {
      configuration.volume = `${id}-${type}-data:/opt/couchdb/data`;
    }
    return configuration;
  } else if (type === "edgedb") {
    // edgedb has a single image for all architectures — no ARM override.
    const configuration = {
      privatePort: 5656,
      environmentVariables: {
        EDGEDB_SERVER_PASSWORD: rootUserPassword,
        EDGEDB_SERVER_USER: rootUser,
        EDGEDB_SERVER_DATABASE: defaultDatabase,
        EDGEDB_SERVER_TLS_CERT_MODE: "generate_self_signed"
      },
      image: `${baseImage}:${version2}`,
      volume: `${id}-${type}-data:/var/lib/edgedb/data`,
      ulimits: {}
    };
    return configuration;
  }
  return null;
}
|
||||
// List the image tags offered for a database type. ARM hosts get the
// ARM-specific tag list when the catalog declares one, otherwise the
// default list; unknown types yield an empty array.
function getDatabaseVersions(type, arch) {
  const entry = supportedDatabaseTypesAndVersions.find((candidate) => candidate.name === type);
  if (!entry) {
    return [];
  }
  return (0, import_common.isARM)(arch) ? entry.versionsARM || entry.versions : entry.versions;
}
|
||||
// Rotates a user's password inside a *running* database container by
// exec-ing the engine's own CLI client. No-op when the database has no
// destination docker; the caller persists the new (encrypted) password.
// SECURITY NOTE(review): passwords are interpolated directly into shell
// command lines here — special characters can break the command, and the
// secrets may be visible in process listings on the docker host. Consider
// passing credentials via stdin or environment instead; confirm before
// changing, as quoting differs per engine.
async function updatePasswordInDb(database, user, newPassword, isRoot) {
  const {
    id,
    type,
    rootUser,
    rootUserPassword,
    dbUser,
    dbUserPassword,
    defaultDatabase,
    destinationDockerId,
    destinationDocker: { id: dockerId }
  } = database;
  if (destinationDockerId) {
    if (type === "mysql") {
      await (0, import_executeCommand.executeCommand)({
        dockerId,
        command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e "ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';"`
      });
    } else if (type === "mariadb") {
      await (0, import_executeCommand.executeCommand)({
        dockerId,
        command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e "SET PASSWORD FOR '${user}'@'%' = PASSWORD('${newPassword}');"`
      });
    } else if (type === "postgresql") {
      // Root connects as the postgres superuser; regular users authenticate
      // with their current (old) password to change their own.
      if (isRoot) {
        await (0, import_executeCommand.executeCommand)({
          dockerId,
          command: `docker exec ${id} psql postgresql://postgres:${rootUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role postgres WITH PASSWORD '${newPassword}'"`
        });
      } else {
        await (0, import_executeCommand.executeCommand)({
          dockerId,
          command: `docker exec ${id} psql postgresql://${dbUser}:${dbUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role ${user} WITH PASSWORD '${newPassword}'"`
        });
      }
    } else if (type === "mongodb") {
      await (0, import_executeCommand.executeCommand)({
        dockerId,
        command: `docker exec ${id} mongo 'mongodb://${rootUser}:${rootUserPassword}@${id}:27017/admin?readPreference=primary&ssl=false' --eval "db.changeUserPassword('${user}','${newPassword}')"`
      });
    } else if (type === "redis") {
      // requirepass is changed in the live config only; persistence of the
      // setting is handled by the caller updating the stored password.
      await (0, import_executeCommand.executeCommand)({
        dockerId,
        command: `docker exec ${id} redis-cli -u redis://${dbUserPassword}@${id}:6379 --raw CONFIG SET requirepass ${newPassword}`
      });
    }
  }
}
|
||||
// Builds the Docker labels Coolify attaches to a standalone database
// container. The database record — minus destination id and timestamps —
// is embedded base64-encoded so the container is self-describing.
async function makeLabelForStandaloneDatabase({ id, image, volume }) {
  const record = await import_prisma.prisma.database.findFirst({ where: { id } });
  // Strip noise fields without mutating the fetched object.
  const { destinationDockerId, createdAt, updatedAt, ...database } = record;
  const encodedConfiguration = (0, import_common.base64Encode)(
    JSON.stringify({
      version: import_common.version,
      image,
      volume,
      ...database
    })
  );
  return [
    "coolify.managed=true",
    `coolify.version=${import_common.version}`,
    `coolify.type=standalone-database`,
    `coolify.name=${database.name}`,
    `coolify.configuration=${encodedConfiguration}`
  ];
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
generateDatabaseConfiguration,
|
||||
getDatabaseImage,
|
||||
getDatabaseVersions,
|
||||
makeLabelForStandaloneDatabase,
|
||||
supportedDatabaseTypesAndVersions,
|
||||
updatePasswordInDb
|
||||
});
|
||||
@@ -0,0 +1,220 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var destinations_exports = {};
|
||||
__export(destinations_exports, {
|
||||
destinationsRouter: () => destinationsRouter
|
||||
});
|
||||
module.exports = __toCommonJS(destinations_exports);
|
||||
var import_zod = require("zod");
|
||||
var import_trpc = require("../../trpc");
|
||||
var import_common = require("../../../lib/common");
|
||||
var import_prisma = require("../../../prisma");
|
||||
var import_executeCommand = require("../../../lib/executeCommand");
|
||||
var import_docker = require("../../../lib/docker");
|
||||
const destinationsRouter = (0, import_trpc.router)({
|
||||
restartProxy: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
await (0, import_common.stopTraefikProxy)(id);
|
||||
await (0, import_common.startTraefikProxy)(id);
|
||||
await import_prisma.prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: true }
|
||||
});
|
||||
}),
|
||||
startProxy: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
await (0, import_common.startTraefikProxy)(id);
|
||||
}),
|
||||
stopProxy: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
await (0, import_common.stopTraefikProxy)(id);
|
||||
}),
|
||||
// Toggles the Coolify-proxy flag for every destination sharing the same
// Docker engine (the `save` mutation below also syncs this flag across
// destinations on one engine).
// NOTE(review): `id` is validated but never used — the updateMany filters
// by engine only, so this is always an engine-wide update; confirm that is
// intentional.
saveSettings: import_trpc.privateProcedure.input(
  import_zod.z.object({
    id: import_zod.z.string(),
    engine: import_zod.z.string(),
    isCoolifyProxyUsed: import_zod.z.boolean()
  })
).mutation(async ({ input, ctx }) => {
  const { id, engine, isCoolifyProxyUsed } = input;
  await import_prisma.prisma.destinationDocker.updateMany({
    where: { engine },
    data: { isCoolifyProxyUsed }
  });
}),
|
||||
status: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).query(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const destination = await import_prisma.prisma.destinationDocker.findUnique({ where: { id } });
|
||||
const { found: isRunning } = await (0, import_docker.checkContainer)({
|
||||
dockerId: destination.id,
|
||||
container: "coolify-proxy",
|
||||
remove: true
|
||||
});
|
||||
return {
|
||||
isRunning
|
||||
};
|
||||
}),
|
||||
save: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
name: import_zod.z.string(),
|
||||
htmlUrl: import_zod.z.string(),
|
||||
apiUrl: import_zod.z.string(),
|
||||
customPort: import_zod.z.number(),
|
||||
customUser: import_zod.z.string(),
|
||||
isSystemWide: import_zod.z.boolean().default(false)
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { teamId } = ctx.user;
|
||||
let {
|
||||
id,
|
||||
name,
|
||||
network,
|
||||
engine,
|
||||
isCoolifyProxyUsed,
|
||||
remoteIpAddress,
|
||||
remoteUser,
|
||||
remotePort
|
||||
} = input;
|
||||
if (id === "new") {
|
||||
if (engine) {
|
||||
const { stdout } = await await (0, import_executeCommand.executeCommand)({
|
||||
command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'`
|
||||
});
|
||||
if (stdout === "") {
|
||||
await await (0, import_executeCommand.executeCommand)({
|
||||
command: `docker network create --attachable ${network}`
|
||||
});
|
||||
}
|
||||
await import_prisma.prisma.destinationDocker.create({
|
||||
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
|
||||
});
|
||||
const destinations = await import_prisma.prisma.destinationDocker.findMany({ where: { engine } });
|
||||
const destination = destinations.find((destination2) => destination2.network === network);
|
||||
if (destinations.length > 0) {
|
||||
const proxyConfigured = destinations.find(
|
||||
(destination2) => destination2.network !== network && destination2.isCoolifyProxyUsed === true
|
||||
);
|
||||
if (proxyConfigured) {
|
||||
isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
|
||||
}
|
||||
await import_prisma.prisma.destinationDocker.updateMany({
|
||||
where: { engine },
|
||||
data: { isCoolifyProxyUsed }
|
||||
});
|
||||
}
|
||||
if (isCoolifyProxyUsed) {
|
||||
await (0, import_common.startTraefikProxy)(destination.id);
|
||||
}
|
||||
return { id: destination.id };
|
||||
} else {
|
||||
const destination = await import_prisma.prisma.destinationDocker.create({
|
||||
data: {
|
||||
name,
|
||||
teams: { connect: { id: teamId } },
|
||||
engine,
|
||||
network,
|
||||
isCoolifyProxyUsed,
|
||||
remoteEngine: true,
|
||||
remoteIpAddress,
|
||||
remoteUser,
|
||||
remotePort: Number(remotePort)
|
||||
}
|
||||
});
|
||||
return { id: destination.id };
|
||||
}
|
||||
} else {
|
||||
await import_prisma.prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
|
||||
return {};
|
||||
}
|
||||
}),
|
||||
check: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
network: import_zod.z.string()
|
||||
})
|
||||
).query(async ({ input, ctx }) => {
|
||||
const { network } = input;
|
||||
const found = await import_prisma.prisma.destinationDocker.findFirst({ where: { network } });
|
||||
if (found) {
|
||||
throw {
|
||||
message: `Network already exists: ${network}`
|
||||
};
|
||||
}
|
||||
}),
|
||||
delete: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const { network, remoteVerified, engine, isCoolifyProxyUsed } = await import_prisma.prisma.destinationDocker.findUnique({ where: { id } });
|
||||
if (isCoolifyProxyUsed) {
|
||||
if (engine || remoteVerified) {
|
||||
const { stdout: found } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
|
||||
});
|
||||
if (found) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: id,
|
||||
command: `docker network disconnect ${network} coolify-proxy`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({ dockerId: id, command: `docker network rm ${network}` });
|
||||
}
|
||||
}
|
||||
}
|
||||
await import_prisma.prisma.destinationDocker.delete({ where: { id } });
|
||||
}),
|
||||
getDestinationById: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).query(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const { teamId } = ctx.user;
|
||||
const destination = await import_prisma.prisma.destinationDocker.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: { sshKey: true, application: true, service: true, database: true }
|
||||
});
|
||||
if (!destination && id !== "new") {
|
||||
throw { status: 404, message: `Destination not found.` };
|
||||
}
|
||||
const settings = await (0, import_common.listSettings)();
|
||||
return {
|
||||
destination,
|
||||
settings
|
||||
};
|
||||
})
|
||||
});
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
destinationsRouter
|
||||
});
|
||||
25
apps/trpc-experimental/server/build/trpc/routers/index.js
Normal file
25
apps/trpc-experimental/server/build/trpc/routers/index.js
Normal file
@@ -0,0 +1,25 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var routers_exports = {};
|
||||
module.exports = __toCommonJS(routers_exports);
|
||||
__reExport(routers_exports, require("./auth"), module.exports);
|
||||
__reExport(routers_exports, require("./dashboard"), module.exports);
|
||||
__reExport(routers_exports, require("./settings"), module.exports);
|
||||
__reExport(routers_exports, require("./applications"), module.exports);
|
||||
__reExport(routers_exports, require("./services"), module.exports);
|
||||
__reExport(routers_exports, require("./databases"), module.exports);
|
||||
__reExport(routers_exports, require("./sources"), module.exports);
|
||||
__reExport(routers_exports, require("./destinations"), module.exports);
|
||||
@@ -0,0 +1,846 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var services_exports = {};
|
||||
__export(services_exports, {
|
||||
getServiceFromDB: () => getServiceFromDB,
|
||||
servicesRouter: () => servicesRouter
|
||||
});
|
||||
module.exports = __toCommonJS(services_exports);
|
||||
var import_zod = require("zod");
|
||||
var import_js_yaml = __toESM(require("js-yaml"));
|
||||
var import_promises = __toESM(require("fs/promises"));
|
||||
var import_path = __toESM(require("path"));
|
||||
var import_trpc = require("../../trpc");
|
||||
var import_common = require("../../../lib/common");
|
||||
var import_prisma = require("../../../prisma");
|
||||
var import_executeCommand = require("../../../lib/executeCommand");
|
||||
var import_lib = require("./lib");
|
||||
var import_docker = require("../../../lib/docker");
|
||||
var import_cuid = __toESM(require("cuid"));
|
||||
var import_dayjs = require("../../../lib/dayjs");
|
||||
const servicesRouter = (0, import_trpc.router)({
|
||||
getLogs: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
containerId: import_zod.z.string(),
|
||||
since: import_zod.z.number().optional().default(0)
|
||||
})
|
||||
).query(async ({ input, ctx }) => {
|
||||
let { id, containerId, since } = input;
|
||||
if (since !== 0) {
|
||||
since = (0, import_dayjs.day)(since).unix();
|
||||
}
|
||||
const {
|
||||
destinationDockerId,
|
||||
destinationDocker: { id: dockerId }
|
||||
} = await import_prisma.prisma.service.findUnique({
|
||||
where: { id },
|
||||
include: { destinationDocker: true }
|
||||
});
|
||||
if (destinationDockerId) {
|
||||
try {
|
||||
const { default: ansi } = await import("strip-ansi");
|
||||
const { stdout, stderr } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId,
|
||||
command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}`
|
||||
});
|
||||
const stripLogsStdout = stdout.toString().split("\n").map((l) => ansi(l)).filter((a) => a);
|
||||
const stripLogsStderr = stderr.toString().split("\n").map((l) => ansi(l)).filter((a) => a);
|
||||
const logs = stripLogsStderr.concat(stripLogsStdout);
|
||||
const sortedLogs = logs.sort(
|
||||
(a, b) => (0, import_dayjs.day)(a.split(" ")[0]).isAfter((0, import_dayjs.day)(b.split(" ")[0])) ? 1 : -1
|
||||
);
|
||||
return {
|
||||
data: {
|
||||
logs: sortedLogs
|
||||
}
|
||||
};
|
||||
} catch (error) {
|
||||
const { statusCode, stderr } = error;
|
||||
if (stderr.startsWith("Error: No such container")) {
|
||||
return {
|
||||
data: {
|
||||
logs: [],
|
||||
noContainer: true
|
||||
}
|
||||
};
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
return {
|
||||
data: {
|
||||
logs: []
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
message: "No logs found."
|
||||
};
|
||||
}),
|
||||
deleteStorage: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
storageId: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { storageId } = input;
|
||||
await import_prisma.prisma.servicePersistentStorage.deleteMany({ where: { id: storageId } });
|
||||
}),
|
||||
saveStorage: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
path: import_zod.z.string(),
|
||||
isNewStorage: import_zod.z.boolean(),
|
||||
storageId: import_zod.z.string().optional().nullable(),
|
||||
containerId: import_zod.z.string().optional()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { id, path: path2, isNewStorage, storageId, containerId } = input;
|
||||
if (isNewStorage) {
|
||||
const volumeName = `${id}-custom${path2.replace(/\//gi, "-")}`;
|
||||
const found = await import_prisma.prisma.servicePersistentStorage.findFirst({
|
||||
where: { path: path2, containerId }
|
||||
});
|
||||
if (found) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: "Persistent storage already exists for this container and path."
|
||||
};
|
||||
}
|
||||
await import_prisma.prisma.servicePersistentStorage.create({
|
||||
data: { path: path2, volumeName, containerId, service: { connect: { id } } }
|
||||
});
|
||||
} else {
|
||||
await import_prisma.prisma.servicePersistentStorage.update({
|
||||
where: { id: storageId },
|
||||
data: { path: path2, containerId }
|
||||
});
|
||||
}
|
||||
}),
|
||||
getStorages: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).query(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const persistentStorages = await import_prisma.prisma.servicePersistentStorage.findMany({
|
||||
where: { serviceId: id }
|
||||
});
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
persistentStorages
|
||||
}
|
||||
};
|
||||
}),
|
||||
deleteSecret: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string(), name: import_zod.z.string() })).mutation(async ({ input, ctx }) => {
|
||||
const { id, name } = input;
|
||||
await import_prisma.prisma.serviceSecret.deleteMany({ where: { serviceId: id, name } });
|
||||
}),
|
||||
saveService: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
name: import_zod.z.string(),
|
||||
fqdn: import_zod.z.string().optional(),
|
||||
exposePort: import_zod.z.string().optional(),
|
||||
type: import_zod.z.string(),
|
||||
serviceSetting: import_zod.z.any(),
|
||||
version: import_zod.z.string().optional()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const teamId = ctx.user?.teamId;
|
||||
let { id, name, fqdn, exposePort, type, serviceSetting, version } = input;
|
||||
if (fqdn)
|
||||
fqdn = fqdn.toLowerCase();
|
||||
if (exposePort)
|
||||
exposePort = Number(exposePort);
|
||||
type = (0, import_common.fixType)(type);
|
||||
const data = {
|
||||
fqdn,
|
||||
name,
|
||||
exposePort,
|
||||
version
|
||||
};
|
||||
const templates = await (0, import_common.getTemplates)();
|
||||
const service = await import_prisma.prisma.service.findUnique({ where: { id } });
|
||||
const foundTemplate = templates.find((t) => (0, import_common.fixType)(t.type) === (0, import_common.fixType)(service.type));
|
||||
for (const setting of serviceSetting) {
|
||||
let { id: settingId, name: name2, value, changed = false, isNew = false, variableName } = setting;
|
||||
if (value) {
|
||||
if (changed) {
|
||||
await import_prisma.prisma.serviceSetting.update({ where: { id: settingId }, data: { value } });
|
||||
}
|
||||
if (isNew) {
|
||||
if (!variableName) {
|
||||
variableName = foundTemplate?.variables.find((v) => v.name === name2).id;
|
||||
}
|
||||
await import_prisma.prisma.serviceSetting.create({
|
||||
data: { name: name2, value, variableName, service: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
await import_prisma.prisma.service.update({
|
||||
where: { id },
|
||||
data
|
||||
});
|
||||
}),
|
||||
createSecret: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
name: import_zod.z.string(),
|
||||
value: import_zod.z.string(),
|
||||
isBuildSecret: import_zod.z.boolean().optional(),
|
||||
isPRMRSecret: import_zod.z.boolean().optional(),
|
||||
isNew: import_zod.z.boolean().optional()
|
||||
})
|
||||
).mutation(async ({ input }) => {
|
||||
let { id, name, value, isNew } = input;
|
||||
if (isNew) {
|
||||
const found = await import_prisma.prisma.serviceSecret.findFirst({ where: { name, serviceId: id } });
|
||||
if (found) {
|
||||
throw `Secret ${name} already exists.`;
|
||||
} else {
|
||||
value = (0, import_common.encrypt)(value.trim());
|
||||
await import_prisma.prisma.serviceSecret.create({
|
||||
data: { name, value, service: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
} else {
|
||||
value = (0, import_common.encrypt)(value.trim());
|
||||
const found = await import_prisma.prisma.serviceSecret.findFirst({ where: { serviceId: id, name } });
|
||||
if (found) {
|
||||
await import_prisma.prisma.serviceSecret.updateMany({
|
||||
where: { serviceId: id, name },
|
||||
data: { value }
|
||||
});
|
||||
} else {
|
||||
await import_prisma.prisma.serviceSecret.create({
|
||||
data: { name, value, service: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
getSecrets: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).query(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const teamId = ctx.user?.teamId;
|
||||
const service = await getServiceFromDB({ id, teamId });
|
||||
let secrets = await import_prisma.prisma.serviceSecret.findMany({
|
||||
where: { serviceId: id },
|
||||
orderBy: { createdAt: "desc" }
|
||||
});
|
||||
const templates = await (0, import_common.getTemplates)();
|
||||
if (!templates)
|
||||
throw new Error("No templates found. Please contact support.");
|
||||
const foundTemplate = templates.find((t) => (0, import_common.fixType)(t.type) === service.type);
|
||||
secrets = secrets.map((secret) => {
|
||||
const foundVariable = foundTemplate?.variables?.find((v) => v.name === secret.name) || null;
|
||||
if (foundVariable) {
|
||||
secret.readOnly = foundVariable.readOnly;
|
||||
}
|
||||
secret.value = (0, import_common.decrypt)(secret.value);
|
||||
return secret;
|
||||
});
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
secrets
|
||||
}
|
||||
};
|
||||
}),
|
||||
wordpress: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string(), ftpEnabled: import_zod.z.boolean() })).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const teamId = ctx.user?.teamId;
|
||||
const {
|
||||
service: {
|
||||
destinationDocker: { engine, remoteEngine, remoteIpAddress }
|
||||
}
|
||||
} = await import_prisma.prisma.wordpress.findUnique({
|
||||
where: { serviceId: id },
|
||||
include: { service: { include: { destinationDocker: true } } }
|
||||
});
|
||||
const publicPort = await (0, import_lib.getFreePublicPort)({ id, remoteEngine, engine, remoteIpAddress });
|
||||
let ftpUser = (0, import_cuid.default)();
|
||||
let ftpPassword = (0, import_lib.generatePassword)({});
|
||||
const hostkeyDir = import_common.isDev ? "/tmp/hostkeys" : "/app/ssl/hostkeys";
|
||||
try {
|
||||
const data = await import_prisma.prisma.wordpress.update({
|
||||
where: { serviceId: id },
|
||||
data: { ftpEnabled },
|
||||
include: { service: { include: { destinationDocker: true } } }
|
||||
});
|
||||
const {
|
||||
service: { destinationDockerId, destinationDocker },
|
||||
ftpPublicPort,
|
||||
ftpUser: user,
|
||||
ftpPassword: savedPassword,
|
||||
ftpHostKey,
|
||||
ftpHostKeyPrivate
|
||||
} = data;
|
||||
const { network, engine: engine2 } = destinationDocker;
|
||||
if (ftpEnabled) {
|
||||
if (user)
|
||||
ftpUser = user;
|
||||
if (savedPassword)
|
||||
ftpPassword = (0, import_common.decrypt)(savedPassword);
|
||||
const { stdout: password } = await (0, import_executeCommand.executeCommand)({
|
||||
command: `echo ${ftpPassword} | openssl passwd -1 -stdin`,
|
||||
shell: true
|
||||
});
|
||||
if (destinationDockerId) {
|
||||
try {
|
||||
await import_promises.default.stat(hostkeyDir);
|
||||
} catch (error) {
|
||||
await (0, import_executeCommand.executeCommand)({ command: `mkdir -p ${hostkeyDir}` });
|
||||
}
|
||||
if (!ftpHostKey) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
command: `ssh-keygen -t ed25519 -f ssh_host_ed25519_key -N "" -q -f ${hostkeyDir}/${id}.ed25519`
|
||||
});
|
||||
const { stdout: ftpHostKey2 } = await (0, import_executeCommand.executeCommand)({
|
||||
command: `cat ${hostkeyDir}/${id}.ed25519`
|
||||
});
|
||||
await import_prisma.prisma.wordpress.update({
|
||||
where: { serviceId: id },
|
||||
data: { ftpHostKey: (0, import_common.encrypt)(ftpHostKey2) }
|
||||
});
|
||||
} else {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
command: `echo "${(0, import_common.decrypt)(ftpHostKey)}" > ${hostkeyDir}/${id}.ed25519`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
if (!ftpHostKeyPrivate) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
command: `ssh-keygen -t rsa -b 4096 -N "" -f ${hostkeyDir}/${id}.rsa`
|
||||
});
|
||||
const { stdout: ftpHostKeyPrivate2 } = await (0, import_executeCommand.executeCommand)({
|
||||
command: `cat ${hostkeyDir}/${id}.rsa`
|
||||
});
|
||||
await import_prisma.prisma.wordpress.update({
|
||||
where: { serviceId: id },
|
||||
data: { ftpHostKeyPrivate: (0, import_common.encrypt)(ftpHostKeyPrivate2) }
|
||||
});
|
||||
} else {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
command: `echo "${(0, import_common.decrypt)(ftpHostKeyPrivate)}" > ${hostkeyDir}/${id}.rsa`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
await import_prisma.prisma.wordpress.update({
|
||||
where: { serviceId: id },
|
||||
data: {
|
||||
ftpPublicPort: publicPort,
|
||||
ftpUser: user ? void 0 : ftpUser,
|
||||
ftpPassword: savedPassword ? void 0 : (0, import_common.encrypt)(ftpPassword)
|
||||
}
|
||||
});
|
||||
try {
|
||||
const { found: isRunning } = await (0, import_docker.checkContainer)({
|
||||
dockerId: destinationDocker.id,
|
||||
container: `${id}-ftp`
|
||||
});
|
||||
if (isRunning) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
const volumes = [
|
||||
`${id}-wordpress-data:/home/${ftpUser}/wordpress`,
|
||||
`${import_common.isDev ? hostkeyDir : "/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys"}/${id}.ed25519:/etc/ssh/ssh_host_ed25519_key`,
|
||||
`${import_common.isDev ? hostkeyDir : "/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys"}/${id}.rsa:/etc/ssh/ssh_host_rsa_key`,
|
||||
`${import_common.isDev ? hostkeyDir : "/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys"}/${id}.sh:/etc/sftp.d/chmod.sh`
|
||||
];
|
||||
const compose = {
|
||||
version: "3.8",
|
||||
services: {
|
||||
[`${id}-ftp`]: {
|
||||
image: `atmoz/sftp:alpine`,
|
||||
command: `'${ftpUser}:${password.replace("\n", "").replace(/\$/g, "$$$")}:e:33'`,
|
||||
extra_hosts: ["host.docker.internal:host-gateway"],
|
||||
container_name: `${id}-ftp`,
|
||||
volumes,
|
||||
networks: [network],
|
||||
depends_on: [],
|
||||
restart: "always"
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[network]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: {
|
||||
[`${id}-wordpress-data`]: {
|
||||
external: true,
|
||||
name: `${id}-wordpress-data`
|
||||
}
|
||||
}
|
||||
};
|
||||
await import_promises.default.writeFile(
|
||||
`${hostkeyDir}/${id}.sh`,
|
||||
`#!/bin/bash
|
||||
chmod 600 /etc/ssh/ssh_host_ed25519_key /etc/ssh/ssh_host_rsa_key
|
||||
userdel -f xfs
|
||||
chown -R 33:33 /home/${ftpUser}/wordpress/`
|
||||
);
|
||||
await (0, import_executeCommand.executeCommand)({ command: `chmod +x ${hostkeyDir}/${id}.sh` });
|
||||
await import_promises.default.writeFile(`${hostkeyDir}/${id}-docker-compose.yml`, import_js_yaml.default.dump(compose));
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker compose -f ${hostkeyDir}/${id}-docker-compose.yml up -d`
|
||||
});
|
||||
}
|
||||
return {
|
||||
publicPort,
|
||||
ftpUser,
|
||||
ftpPassword
|
||||
};
|
||||
} else {
|
||||
await import_prisma.prisma.wordpress.update({
|
||||
where: { serviceId: id },
|
||||
data: { ftpPublicPort: null }
|
||||
});
|
||||
try {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
|
||||
shell: true
|
||||
});
|
||||
} catch (error) {
|
||||
}
|
||||
await (0, import_docker.stopTcpHttpProxy)(id, destinationDocker, ftpPublicPort);
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
throw message;
|
||||
} finally {
|
||||
try {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
command: `rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh`
|
||||
});
|
||||
} catch (error) {
|
||||
}
|
||||
}
|
||||
}),
|
||||
start: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const teamId = ctx.user?.teamId;
|
||||
const service = await getServiceFromDB({ id, teamId });
|
||||
const arm = (0, import_common.isARM)(service.arch);
|
||||
const { type, destinationDockerId, destinationDocker, persistentStorage, exposePort } = service;
|
||||
const { workdir } = await (0, import_common.createDirectories)({ repository: type, buildId: id });
|
||||
const template = await (0, import_lib.parseAndFindServiceTemplates)(service, workdir, true);
|
||||
const network = destinationDockerId && destinationDocker.network;
|
||||
const config = {};
|
||||
for (const s in template.services) {
|
||||
let newEnvironments = [];
|
||||
if (arm) {
|
||||
if (template.services[s]?.environmentArm?.length > 0) {
|
||||
for (const environment of template.services[s].environmentArm) {
|
||||
let [env, ...value] = environment.split("=");
|
||||
value = value.join("=");
|
||||
if (!value.startsWith("$$secret") && value !== "") {
|
||||
newEnvironments.push(`${env}=${value}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (template.services[s]?.environment?.length > 0) {
|
||||
for (const environment of template.services[s].environment) {
|
||||
let [env, ...value] = environment.split("=");
|
||||
value = value.join("=");
|
||||
if (!value.startsWith("$$secret") && value !== "") {
|
||||
newEnvironments.push(`${env}=${value}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const secrets = await (0, import_lib.verifyAndDecryptServiceSecrets)(id);
|
||||
for (const secret of secrets) {
|
||||
const { name, value } = secret;
|
||||
if (value) {
|
||||
const foundEnv = !!template.services[s].environment?.find(
|
||||
(env) => env.startsWith(`${name}=`)
|
||||
);
|
||||
const foundNewEnv = !!newEnvironments?.find((env) => env.startsWith(`${name}=`));
|
||||
if (foundEnv && !foundNewEnv) {
|
||||
newEnvironments.push(`${name}=${value}`);
|
||||
}
|
||||
if (!foundEnv && !foundNewEnv && s === id) {
|
||||
newEnvironments.push(`${name}=${value}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
const customVolumes = await import_prisma.prisma.servicePersistentStorage.findMany({
|
||||
where: { serviceId: id }
|
||||
});
|
||||
let volumes = /* @__PURE__ */ new Set();
|
||||
if (arm) {
|
||||
template.services[s]?.volumesArm && template.services[s].volumesArm.length > 0 && template.services[s].volumesArm.forEach((v) => volumes.add(v));
|
||||
} else {
|
||||
template.services[s]?.volumes && template.services[s].volumes.length > 0 && template.services[s].volumes.forEach((v) => volumes.add(v));
|
||||
}
|
||||
if (service.type === "plausibleanalytics" && service.plausibleAnalytics?.id) {
|
||||
let temp = Array.from(volumes);
|
||||
temp.forEach((a) => {
|
||||
const t = a.replace(service.id, service.plausibleAnalytics.id);
|
||||
volumes.delete(a);
|
||||
volumes.add(t);
|
||||
});
|
||||
}
|
||||
if (customVolumes.length > 0) {
|
||||
for (const customVolume of customVolumes) {
|
||||
const { volumeName, path: path2, containerId } = customVolume;
|
||||
if (volumes && volumes.size > 0 && !volumes.has(`${volumeName}:${path2}`) && containerId === service) {
|
||||
volumes.add(`${volumeName}:${path2}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
let ports = [];
|
||||
if (template.services[s].proxy?.length > 0) {
|
||||
for (const proxy of template.services[s].proxy) {
|
||||
if (proxy.hostPort) {
|
||||
ports.push(`${proxy.hostPort}:${proxy.port}`);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (template.services[s].ports?.length === 1) {
|
||||
for (const port of template.services[s].ports) {
|
||||
if (exposePort) {
|
||||
ports.push(`${exposePort}:${port}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let image = template.services[s].image;
|
||||
if (arm && template.services[s].imageArm) {
|
||||
image = template.services[s].imageArm;
|
||||
}
|
||||
config[s] = {
|
||||
container_name: s,
|
||||
build: template.services[s].build || void 0,
|
||||
command: template.services[s].command,
|
||||
entrypoint: template.services[s]?.entrypoint,
|
||||
image,
|
||||
expose: template.services[s].ports,
|
||||
ports: ports.length > 0 ? ports : void 0,
|
||||
volumes: Array.from(volumes),
|
||||
environment: newEnvironments,
|
||||
depends_on: template.services[s]?.depends_on,
|
||||
ulimits: template.services[s]?.ulimits,
|
||||
cap_drop: template.services[s]?.cap_drop,
|
||||
cap_add: template.services[s]?.cap_add,
|
||||
labels: (0, import_common.makeLabelForServices)(type),
|
||||
...(0, import_docker.defaultComposeConfiguration)(network)
|
||||
};
|
||||
if (template.services[s]?.files?.length > 0) {
|
||||
if (!config[s].build) {
|
||||
config[s].build = {
|
||||
context: workdir,
|
||||
dockerfile: `Dockerfile.${s}`
|
||||
};
|
||||
}
|
||||
let Dockerfile = `
|
||||
FROM ${template.services[s].image}`;
|
||||
for (const file of template.services[s].files) {
|
||||
const { location, content } = file;
|
||||
const source = import_path.default.join(workdir, location);
|
||||
await import_promises.default.mkdir(import_path.default.dirname(source), { recursive: true });
|
||||
await import_promises.default.writeFile(source, content);
|
||||
Dockerfile += `
|
||||
COPY .${location} ${location}`;
|
||||
}
|
||||
await import_promises.default.writeFile(`${workdir}/Dockerfile.${s}`, Dockerfile);
|
||||
}
|
||||
}
|
||||
const { volumeMounts } = (0, import_lib.persistentVolumes)(id, persistentStorage, config);
|
||||
const composeFile = {
|
||||
version: "3.8",
|
||||
services: config,
|
||||
networks: {
|
||||
[network]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: volumeMounts
|
||||
};
|
||||
const composeFileDestination = `${workdir}/docker-compose.yaml`;
|
||||
await import_promises.default.writeFile(composeFileDestination, import_js_yaml.default.dump(composeFile));
|
||||
let fastify = null;
|
||||
await (0, import_lib.startServiceContainers)(fastify, id, teamId, destinationDocker.id, composeFileDestination);
|
||||
if (service.type === "minio") {
|
||||
try {
|
||||
const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
|
||||
});
|
||||
if (containers) {
|
||||
const containerArray = containers.split("\n");
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker stop -t 0 ${container}`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker rm --force ${container}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
try {
|
||||
const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
|
||||
});
|
||||
if (containers) {
|
||||
const containerArray = containers.split("\n");
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker stop -t 0 ${container}`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker rm --force ${container}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
}
|
||||
}
|
||||
}),
|
||||
stop: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const teamId = ctx.user?.teamId;
|
||||
const { destinationDockerId } = await getServiceFromDB({ id, teamId });
|
||||
if (destinationDockerId) {
|
||||
const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}`
|
||||
});
|
||||
if (containers) {
|
||||
const containerArray = containers.split("\n");
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker stop -t 0 ${container}`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker rm --force ${container}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return {};
|
||||
}
|
||||
}),
|
||||
getServices: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).query(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const teamId = ctx.user?.teamId;
|
||||
const service = await getServiceFromDB({ id, teamId });
|
||||
if (!service) {
|
||||
throw { status: 404, message: "Service not found." };
|
||||
}
|
||||
let template = {};
|
||||
let tags = [];
|
||||
if (service.type) {
|
||||
template = await (0, import_lib.parseAndFindServiceTemplates)(service);
|
||||
tags = await (0, import_common.getTags)(service.type);
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
settings: await (0, import_common.listSettings)(),
|
||||
service,
|
||||
template,
|
||||
tags
|
||||
}
|
||||
};
|
||||
}),
|
||||
// Report the run state of every container belonging to this service's
// compose project, keyed by container name.
status: import_trpc.privateProcedure.input(import_zod.z.object({ id: import_zod.z.string() })).query(async ({ ctx, input }) => {
  const id = input.id;
  const teamId = ctx.user?.teamId;
  if (!teamId) {
    throw { status: 400, message: "Team not found." };
  }
  // NOTE(review): getServiceFromDB returns null when the service is not
  // found; the destructure below would then throw a TypeError — confirm
  // whether a 404 guard is wanted here as in getServices.
  const service = await getServiceFromDB({ id, teamId });
  const { destinationDockerId } = service;
  // payload maps container name -> { status: { isExcluded, isRunning, isExited, isRestarting } }
  let payload = {};
  if (destinationDockerId) {
    // One JSON object per line, one line per container of the compose project.
    const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
      dockerId: service.destinationDocker.id,
      command: `docker ps -a --filter "label=com.docker.compose.project=${id}" --format '{{json .}}'`
    });
    if (containers) {
      const containersArray = containers.trim().split("\n");
      // Guard against the single-empty-string array produced by "".split("\n").
      if (containersArray.length > 0 && containersArray[0] !== "") {
        const templates = await (0, import_common.getTemplates)();
        let template = templates.find((t) => t.type === service.type);
        // Round-trip through JSON to substitute $$id placeholders with the
        // real service id (JSON.stringify(undefined) is undefined, so a
        // missing template skips the substitution).
        const templateStr = JSON.stringify(template);
        if (templateStr) {
          template = JSON.parse(templateStr.replaceAll("$$id", service.id));
        }
        for (const container of containersArray) {
          let isRunning = false;
          let isExited = false;
          let isRestarting = false;
          let isExcluded = false;
          const containerObj = JSON.parse(container);
          // Templates may mark helper containers as excluded from status reporting.
          const exclude = template?.services[containerObj.Names]?.exclude;
          if (exclude) {
            payload[containerObj.Names] = {
              status: {
                isExcluded: true,
                isRunning: false,
                isExited: false,
                isRestarting: false
              }
            };
            continue;
          }
          // Map docker's State string onto the boolean flags the UI consumes.
          const status = containerObj.State;
          if (status === "running") {
            isRunning = true;
          }
          if (status === "exited") {
            isExited = true;
          }
          if (status === "restarting") {
            isRestarting = true;
          }
          payload[containerObj.Names] = {
            status: {
              isExcluded,
              isRunning,
              isExited,
              isRestarting
            }
          };
        }
      }
    }
  }
  return payload;
}),
|
||||
cleanup: import_trpc.privateProcedure.query(async ({ ctx }) => {
|
||||
const teamId = ctx.user?.teamId;
|
||||
let services = await import_prisma.prisma.service.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
include: { destinationDocker: true, teams: true }
|
||||
});
|
||||
for (const service of services) {
|
||||
if (!service.fqdn) {
|
||||
if (service.destinationDockerId) {
|
||||
const { stdout: containers } = await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: service.destinationDockerId,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}`
|
||||
});
|
||||
if (containers) {
|
||||
const containerArray = containers.split("\n");
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: service.destinationDockerId,
|
||||
command: `docker stop -t 0 ${container}`
|
||||
});
|
||||
await (0, import_executeCommand.executeCommand)({
|
||||
dockerId: service.destinationDockerId,
|
||||
command: `docker rm --force ${container}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
await (0, import_common.removeService)({ id: service.id });
|
||||
}
|
||||
}
|
||||
}),
|
||||
delete: import_trpc.privateProcedure.input(import_zod.z.object({ force: import_zod.z.boolean(), id: import_zod.z.string() })).mutation(async ({ input }) => {
|
||||
const { id } = input;
|
||||
await import_prisma.prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.serviceSetting.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.fider.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.ghost.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.umami.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.hasura.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.minio.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.vscodeserver.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.wordpress.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.glitchTip.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.moodle.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.appwrite.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.searxng.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.weblate.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.taiga.deleteMany({ where: { serviceId: id } });
|
||||
await import_prisma.prisma.service.delete({ where: { id } });
|
||||
return {};
|
||||
})
|
||||
});
|
||||
// Load a service scoped to a team, decrypt its secrets, and attach the
// instance-wide settings. Returns null when no matching service exists.
async function getServiceFromDB({
  id,
  teamId
}) {
  const settings = await import_prisma.prisma.setting.findFirst();
  // Team "0" is unrestricted: an undefined id filter matches any team.
  const teamFilter = teamId === "0" ? void 0 : teamId;
  const service = await import_prisma.prisma.service.findFirst({
    where: { id, teams: { some: { id: teamFilter } } },
    include: {
      destinationDocker: true,
      persistentStorage: true,
      serviceSecret: true,
      serviceSetting: true,
      wordpress: true,
      plausibleAnalytics: true
    }
  });
  if (!service) {
    return null;
  }
  // Secrets are stored encrypted; decrypt them in place for the caller.
  for (const secret of service.serviceSecret) {
    secret.value = (0, import_common.decrypt)(secret.value);
  }
  // The wordpress relation stores its FTP password encrypted as well.
  if (service.wordpress) {
    service.wordpress.ftpPassword = (0, import_common.decrypt)(service.wordpress.ftpPassword);
  }
  return { ...service, settings };
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
getServiceFromDB,
|
||||
servicesRouter
|
||||
});
|
||||
371
apps/trpc-experimental/server/build/trpc/routers/services/lib.js
Normal file
371
apps/trpc-experimental/server/build/trpc/routers/services/lib.js
Normal file
@@ -0,0 +1,371 @@
|
||||
"use strict";
// Bundler-generated (esbuild-style) CommonJS interop preamble: cached
// Object helpers plus the shims that re-export this module's API and wrap
// required CJS modules for ESM-style default access.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy getters on `target` for every entry in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except`
// and anything `to` already defines.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a require()d module so `.default` behaves like an ESM default import.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark an exports object as an ES module for downstream interop checks.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var lib_exports = {};
// Public API of this module (services template/start helpers).
__export(lib_exports, {
  generatePassword: () => generatePassword,
  getFreePublicPort: () => getFreePublicPort,
  parseAndFindServiceTemplates: () => parseAndFindServiceTemplates,
  persistentVolumes: () => persistentVolumes,
  startServiceContainers: () => startServiceContainers,
  verifyAndDecryptServiceSecrets: () => verifyAndDecryptServiceSecrets
});
module.exports = __toCommonJS(lib_exports);
// Project-local dependencies; bcryptjs and crypto are wrapped for default access.
var import_common = require("../../../lib/common");
var import_bcryptjs = __toESM(require("bcryptjs"));
var import_prisma = require("../../../prisma");
var import_crypto = __toESM(require("crypto"));
var import_executeCommand = require("../../../lib/executeCommand");
|
||||
// Resolve the template matching `service.type` and render it for this
// service: for the UI (isDeploy=false) it builds a per-container view with
// editable environment/fqdn/hostPort entries; for deploys it takes the raw
// template. In both cases $$-placeholders ($$id, $$core_version, $$workdir,
// $$config_*, $$secret_*) are substituted via a JSON string round-trip.
async function parseAndFindServiceTemplates(service, workdir, isDeploy = false) {
  const templates = await (0, import_common.getTemplates)();
  const foundTemplate = templates.find((t) => (0, import_common.fixType)(t.type) === service.type);
  let parsedTemplate = {};
  if (foundTemplate) {
    if (!isDeploy) {
      // UI mode: one entry per template service, keyed by the concrete
      // container name ($$id replaced with the service id).
      for (const [key, value] of Object.entries(foundTemplate.services)) {
        const realKey = key.replace("$$id", service.id);
        // Derive a display name when the template does not provide one.
        let name = value.name;
        if (!name) {
          if (Object.keys(foundTemplate.services).length === 1) {
            name = foundTemplate.name || service.name.toLowerCase();
          } else {
            if (key === "$$id") {
              name = foundTemplate.name || key.replaceAll("$$id-", "") || service.name.toLowerCase();
            } else {
              name = key.replaceAll("$$id-", "") || service.name.toLowerCase();
            }
          }
        }
        parsedTemplate[realKey] = {
          value,
          name,
          documentation: value.documentation || foundTemplate.documentation || "https://docs.coollabs.io",
          image: value.image,
          files: value?.files,
          environment: [],
          fqdns: [],
          hostPorts: [],
          proxy: {}
        };
        // Collect user-configurable environment variables declared by the template.
        if (value.environment?.length > 0) {
          for (const env of value.environment) {
            // Split on the first "=" only; the value may itself contain "=".
            let [envKey, ...envValue] = env.split("=");
            envValue = envValue.join("=");
            let variable = null;
            if (foundTemplate?.variables) {
              // Match by variable name first, then by placeholder id in the value.
              variable = foundTemplate?.variables.find((v) => v.name === envKey) || foundTemplate?.variables.find((v) => v.id === envValue);
            }
            if (variable) {
              const id = variable.id.replaceAll("$$", "");
              const label = variable?.label;
              const description = variable?.description;
              const defaultValue = variable?.defaultValue;
              const main = variable?.main || "$$id";
              const type = variable?.type || "input";
              const placeholder = variable?.placeholder || "";
              const readOnly = variable?.readOnly || false;
              const required = variable?.required || false;
              // Only $$config_* values (or explicitly flagged variables) are
              // shown on the configuration screen; coolify-internal config
              // ($$config_coolify*) stays hidden.
              if (envValue.startsWith("$$config") || variable?.showOnConfiguration) {
                if (envValue.startsWith("$$config_coolify")) {
                  continue;
                }
                parsedTemplate[realKey].environment.push({
                  id,
                  name: envKey,
                  value: envValue,
                  main,
                  label,
                  description,
                  defaultValue,
                  type,
                  placeholder,
                  required,
                  readOnly
                });
              }
            }
          }
        }
        // Proxy entries expose configurable domains and host ports, seeded
        // with any value already saved in serviceSetting.
        if (value?.proxy && value.proxy.length > 0) {
          for (const proxyValue of value.proxy) {
            if (proxyValue.domain) {
              const variable = foundTemplate?.variables.find((v) => v.id === proxyValue.domain);
              if (variable) {
                const { id, name: name2, label, description, defaultValue, required = false } = variable;
                const found = await import_prisma.prisma.serviceSetting.findFirst({
                  where: { serviceId: service.id, variableName: proxyValue.domain }
                });
                parsedTemplate[realKey].fqdns.push({
                  id,
                  name: name2,
                  value: found?.value || "",
                  label,
                  description,
                  defaultValue,
                  required
                });
              }
            }
            if (proxyValue.hostPort) {
              const variable = foundTemplate?.variables.find((v) => v.id === proxyValue.hostPort);
              if (variable) {
                const { id, name: name2, label, description, defaultValue, required = false } = variable;
                const found = await import_prisma.prisma.serviceSetting.findFirst({
                  where: { serviceId: service.id, variableName: proxyValue.hostPort }
                });
                parsedTemplate[realKey].hostPorts.push({
                  id,
                  name: name2,
                  value: found?.value || "",
                  label,
                  description,
                  defaultValue,
                  required
                });
              }
            }
          }
        }
      }
    } else {
      // Deploy mode: use the template as-is; only placeholder substitution follows.
      parsedTemplate = foundTemplate;
    }
    // Placeholder substitution happens on the serialized JSON so every nested
    // occurrence is covered in one pass.
    let strParsedTemplate = JSON.stringify(parsedTemplate);
    strParsedTemplate = strParsedTemplate.replaceAll("$$id", service.id);
    strParsedTemplate = strParsedTemplate.replaceAll(
      "$$core_version",
      service.version || foundTemplate.defaultVersion
    );
    if (workdir) {
      strParsedTemplate = strParsedTemplate.replaceAll("$$workdir", workdir);
    }
    if (service.serviceSetting.length > 0) {
      for (const setting of service.serviceSetting) {
        const { value, variableName } = setting;
        // The trailing `"` in the pattern anchors the match to the JSON
        // closing quote; every replacement below re-appends it.
        const regex = new RegExp(`\\$\\$config_${variableName.replace("$$config_", "")}"`, "gi");
        if (value === "$$generate_fqdn") {
          // NOTE(review): `+` binds tighter than `||`, so the `|| '"'`
          // fallback is dead code — even a null fqdn yields the truthy
          // string 'null"'. Confirm the intended null handling.
          strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '"' || '"');
        } else if (value === "$$generate_fqdn_slash") {
          strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '/"');
        } else if (value === "$$generate_domain") {
          strParsedTemplate = strParsedTemplate.replaceAll(regex, (0, import_common.getDomain)(service.fqdn) + '"');
        } else if (service.destinationDocker?.network && value === "$$generate_network") {
          strParsedTemplate = strParsedTemplate.replaceAll(
            regex,
            service.destinationDocker.network + '"'
          );
        } else {
          strParsedTemplate = strParsedTemplate.replaceAll(regex, value + '"');
        }
      }
    }
    if (service.serviceSecret.length > 0) {
      for (const secret of service.serviceSecret) {
        let { name, value } = secret;
        name = name.toLowerCase();
        // $$hashed$$secret_<name> receives a bcrypt hash; $$secret_<name>
        // receives the raw value. Double quotes are escaped to keep the
        // surrounding JSON valid.
        const regexHashed = new RegExp(`\\$\\$hashed\\$\\$secret_${name}`, "gi");
        const regex = new RegExp(`\\$\\$secret_${name}`, "gi");
        if (value) {
          strParsedTemplate = strParsedTemplate.replaceAll(
            regexHashed,
            import_bcryptjs.default.hashSync(value.replaceAll('"', '\\"'), 10)
          );
          strParsedTemplate = strParsedTemplate.replaceAll(regex, value.replaceAll('"', '\\"'));
        } else {
          strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, "");
          strParsedTemplate = strParsedTemplate.replaceAll(regex, "");
        }
      }
    }
    parsedTemplate = JSON.parse(strParsedTemplate);
  }
  return parsedTemplate;
}
|
||||
// Generate a random password. isHex returns `length` random bytes hex-encoded;
// otherwise a `length`-character password containing at least one lowercase,
// one uppercase and one digit (plus one symbol when `symbols` is true).
function generatePassword({
  length = 24,
  symbols = false,
  isHex = false
}) {
  if (isHex) {
    return import_crypto.default.randomBytes(length).toString("hex");
  }
  // BUG FIX: the original called `generator.generate(...)`, but `generator`
  // is never imported or declared in this bundle, so the non-hex path threw
  // a ReferenceError. Rebuild the same contract (numbers: true, strict: true,
  // optional symbols) on the crypto module that is already required above.
  const lowercase = "abcdefghijklmnopqrstuvwxyz";
  const uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
  const digits = "0123456789";
  const symbolChars = "!@#$%^&*()+_-=}{[]|:;/?.><,`~";
  const pools = [lowercase, uppercase, digits];
  if (symbols) {
    pools.push(symbolChars);
  }
  const allChars = pools.join("");
  // crypto.randomInt gives an unbiased, CSPRNG-backed index.
  const pick = (chars) => chars[import_crypto.default.randomInt(chars.length)];
  // "strict" semantics: seed one character from every active pool, then fill.
  const password = pools.map(pick);
  while (password.length < length) {
    password.push(pick(allChars));
  }
  // Fisher-Yates shuffle so the mandatory characters are not clustered first.
  for (let i = password.length - 1; i > 0; i--) {
    const j = import_crypto.default.randomInt(i + 1);
    [password[i], password[j]] = [password[j], password[i]];
  }
  return password.slice(0, length).join("");
}
|
||||
// Find a free public port in the configured [minPort, maxPort] range for a
// database/service, excluding ports already reserved in the DB and ports
// something is currently listening on. Returns the port number, or false
// when none is available.
async function getFreePublicPort({ id, remoteEngine, engine, remoteIpAddress }) {
  const { default: isReachable } = await import("is-port-reachable");
  const data = await import_prisma.prisma.setting.findFirst();
  const { minPort, maxPort } = data;
  // The remote and local code paths were duplicated and differed only in how
  // the destination docker is matched and which host gets probed.
  const destinationFilter = remoteEngine ? { remoteIpAddress } : { engine };
  const host = remoteEngine ? remoteIpAddress : "localhost";
  // The four reservation lookups are independent — run them in parallel.
  // (The original also used a redundant `await (await ...)` double-await.)
  const [dbUsed, wpFtpUsed, wpUsed, minioUsed] = await Promise.all([
    import_prisma.prisma.database.findMany({
      where: {
        publicPort: { not: null },
        id: { not: id },
        destinationDocker: destinationFilter
      },
      select: { publicPort: true }
    }).then((rows) => rows.map((a) => a.publicPort)),
    import_prisma.prisma.wordpress.findMany({
      where: {
        ftpPublicPort: { not: null },
        id: { not: id },
        service: { destinationDocker: destinationFilter }
      },
      select: { ftpPublicPort: true }
    }).then((rows) => rows.map((a) => a.ftpPublicPort)),
    import_prisma.prisma.wordpress.findMany({
      where: {
        mysqlPublicPort: { not: null },
        id: { not: id },
        service: { destinationDocker: destinationFilter }
      },
      select: { mysqlPublicPort: true }
    }).then((rows) => rows.map((a) => a.mysqlPublicPort)),
    import_prisma.prisma.minio.findMany({
      where: {
        publicPort: { not: null },
        id: { not: id },
        service: { destinationDocker: destinationFilter }
      },
      select: { publicPort: true }
    }).then((rows) => rows.map((a) => a.publicPort))
  ]);
  // Set membership is O(1) versus the original O(n) Array.includes per port.
  const usedPorts = new Set([...dbUsed, ...wpFtpUsed, ...wpUsed, ...minioUsed]);
  const range = (0, import_common.generateRangeArray)(minPort, maxPort);
  const availablePorts = range.filter((port) => !usedPorts.has(port));
  // Probe candidates in order and return the first port nothing listens on.
  for (const port of availablePorts) {
    const found = await isReachable(port, { host });
    if (!found) {
      return port;
    }
  }
  return false;
}
|
||||
// Load every secret of a service and return { name, value } pairs with the
// stored values decrypted and made safe for compose interpolation.
async function verifyAndDecryptServiceSecrets(id) {
  const storedSecrets = await import_prisma.prisma.serviceSecret.findMany({ where: { serviceId: id } });
  const decrypted = [];
  for (const { name, value } of storedSecrets) {
    if (!value) {
      // Nothing to decrypt; pass the (empty/null) value through unchanged.
      decrypted.push({ name, value });
      continue;
    }
    // Escape "$" by doubling it ("$$$" in a replacement string emits "$$")
    // so the plaintext survives docker-compose variable interpolation.
    const plain = (0, import_common.decrypt)(value).replaceAll(/\$/gi, "$$$");
    decrypted.push({ name, value: plain });
  }
  return decrypted;
}
|
||||
// Build the compose `volumes:` mount map for a service: named volumes
// referenced by the rendered config plus the user's persistent storage
// entries, each keyed by the part before the first ":".
function persistentVolumes(id, persistentStorage, config) {
  // Named (non host-path) volumes declared by any service in the config.
  const namedVolumes = new Set();
  for (const serviceConfig of Object.values(config)) {
    if (!serviceConfig.volumes) {
      continue;
    }
    for (const volume of serviceConfig.volumes) {
      // Entries starting with "/" are host bind mounts, not named volumes.
      if (!volume.startsWith("/")) {
        namedVolumes.add(volume);
      }
    }
  }
  // Persistent storage mounts as `<id><path-with-dashes>:<path>`.
  const storageVolumes = (persistentStorage ?? []).map(
    (storage) => `${id}${storage.path.replace(/\//gi, "-")}:${storage.path}`
  );
  const allVolumes = [...namedVolumes, ...storageVolumes];
  // Later entries overwrite earlier ones on key collision, matching the
  // original Object.assign merge.
  const volumeMounts = {};
  for (const volume of allVolumes) {
    const volumeName = volume.split(":")[0];
    volumeMounts[volumeName] = { name: volumeName };
  }
  return { volumeMounts };
}
|
||||
// Bring up a service's containers from its rendered compose file:
// pull (best effort) -> build -> create -> start -> up -d.
async function startServiceContainers(fastify, id, teamId, dockerId, composeFileDestination) {
  // Run one `docker compose` subcommand against the rendered compose file.
  const compose = (subcommand) => (0, import_executeCommand.executeCommand)({
    dockerId,
    command: `docker compose -f ${composeFileDestination} ${subcommand}`
  });
  try {
    await compose("pull");
  } catch (error) {
    // Deliberate best effort: a failed pull (e.g. unreachable registry) must
    // not block startup — build/up below can still use cached images.
  }
  await compose("build --no-cache");
  await compose("create");
  await compose("start");
  // Brief pause before `up -d`, as in the original sequence.
  await (0, import_common.asyncSleep)(1e3);
  await compose("up -d");
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
generatePassword,
|
||||
getFreePublicPort,
|
||||
parseAndFindServiceTemplates,
|
||||
persistentVolumes,
|
||||
startServiceContainers,
|
||||
verifyAndDecryptServiceSecrets
|
||||
});
|
||||
108
apps/trpc-experimental/server/build/trpc/routers/settings.js
Normal file
108
apps/trpc-experimental/server/build/trpc/routers/settings.js
Normal file
@@ -0,0 +1,108 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var settings_exports = {};
|
||||
__export(settings_exports, {
|
||||
settingsRouter: () => settingsRouter
|
||||
});
|
||||
module.exports = __toCommonJS(settings_exports);
|
||||
var import_trpc = require("../trpc");
|
||||
var import_server = require("@trpc/server");
|
||||
var import_common = require("../../lib/common");
|
||||
var import_env = require("../../env");
|
||||
var import_jsonwebtoken = __toESM(require("jsonwebtoken"));
|
||||
// tRPC router for instance-level settings.
const settingsRouter = (0, import_trpc.router)({
  // Public: only exposes whether self-registration is enabled.
  getBaseSettings: import_trpc.publicProcedure.query(async () => {
    const settings = await (0, import_common.listSettings)();
    return {
      success: true,
      data: {
        isRegistrationEnabled: settings?.isRegistrationEnabled
      }
    };
  }),
  // Private: full instance settings plus the caller's permission, a freshly
  // signed JWT, and any pending team invitations.
  getInstanceSettings: import_trpc.privateProcedure.query(async ({ ctx }) => {
    try {
      const settings = await (0, import_common.listSettings)();
      let isAdmin = false;
      let permission = null;
      let token = null;
      let pendingInvitations = [];
      if (!settings) {
        throw new import_server.TRPCError({
          code: "INTERNAL_SERVER_ERROR",
          message: "An unexpected error occurred, please try again later."
        });
      }
      if (ctx.user) {
        const currentUser = await (0, import_common.getCurrentUser)(ctx.user.userId);
        if (currentUser) {
          // Permission of the caller within the team they are acting as.
          const foundPermission = currentUser.permission.find(
            (p) => p.teamId === ctx.user?.teamId
          )?.permission;
          if (foundPermission) {
            permission = foundPermission;
            isAdmin = foundPermission === "owner" || foundPermission === "admin";
          }
          // Re-issue the session token with the resolved permission baked in.
          const payload = {
            userId: ctx.user?.userId,
            teamId: ctx.user?.teamId,
            permission,
            isAdmin,
            iat: Math.floor(Date.now() / 1e3)
          };
          token = import_jsonwebtoken.default.sign(payload, import_env.env.COOLIFY_SECRET_KEY);
        }
        pendingInvitations = await (0, import_common.getTeamInvitation)(ctx.user.userId);
      }
      return {
        success: true,
        data: {
          token,
          userId: ctx.user?.userId,
          teamId: ctx.user?.teamId,
          permission,
          isAdmin,
          // Instance IPs are only revealed to authenticated team members.
          ipv4: ctx.user?.teamId ? settings.ipv4 : null,
          ipv6: ctx.user?.teamId ? settings.ipv6 : null,
          version: import_common.version,
          whiteLabeled: import_env.env.COOLIFY_WHITE_LABELED === "true",
          whiteLabeledIcon: import_env.env.COOLIFY_WHITE_LABELED_ICON,
          isRegistrationEnabled: settings.isRegistrationEnabled,
          pendingInvitations
        }
      };
    } catch (error) {
      // NOTE(review): this also re-wraps the TRPCError thrown above for
      // missing settings; the original error is preserved via `cause`.
      throw new import_server.TRPCError({
        code: "INTERNAL_SERVER_ERROR",
        message: "An unexpected error occurred, please try again later.",
        cause: error
      });
    }
  })
});
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
settingsRouter
|
||||
});
|
||||
@@ -0,0 +1,241 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var sources_exports = {};
|
||||
__export(sources_exports, {
|
||||
sourcesRouter: () => sourcesRouter
|
||||
});
|
||||
module.exports = __toCommonJS(sources_exports);
|
||||
var import_zod = require("zod");
|
||||
var import_trpc = require("../../trpc");
|
||||
var import_common = require("../../../lib/common");
|
||||
var import_prisma = require("../../../prisma");
|
||||
var import_cuid = __toESM(require("cuid"));
|
||||
const sourcesRouter = (0, import_trpc.router)({
|
||||
save: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
name: import_zod.z.string(),
|
||||
htmlUrl: import_zod.z.string(),
|
||||
apiUrl: import_zod.z.string(),
|
||||
customPort: import_zod.z.number(),
|
||||
customUser: import_zod.z.string(),
|
||||
isSystemWide: import_zod.z.boolean().default(false)
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
let { id, name, htmlUrl, apiUrl, customPort, customUser, isSystemWide } = input;
|
||||
if (customPort)
|
||||
customPort = Number(customPort);
|
||||
await import_prisma.prisma.gitSource.update({
|
||||
where: { id },
|
||||
data: { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide }
|
||||
});
|
||||
}),
|
||||
newGitHubApp: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
name: import_zod.z.string(),
|
||||
htmlUrl: import_zod.z.string(),
|
||||
apiUrl: import_zod.z.string(),
|
||||
organization: import_zod.z.string(),
|
||||
customPort: import_zod.z.number(),
|
||||
isSystemWide: import_zod.z.boolean().default(false)
|
||||
})
|
||||
).mutation(async ({ ctx, input }) => {
|
||||
const { teamId } = ctx.user;
|
||||
let { id, name, htmlUrl, apiUrl, organization, customPort, isSystemWide } = input;
|
||||
if (customPort)
|
||||
customPort = Number(customPort);
|
||||
if (id === "new") {
|
||||
const newId = (0, import_cuid.default)();
|
||||
await import_prisma.prisma.gitSource.create({
|
||||
data: {
|
||||
id: newId,
|
||||
name,
|
||||
htmlUrl,
|
||||
apiUrl,
|
||||
organization,
|
||||
customPort,
|
||||
isSystemWide,
|
||||
type: "github",
|
||||
teams: { connect: { id: teamId } }
|
||||
}
|
||||
});
|
||||
return {
|
||||
id: newId
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}),
|
||||
newGitLabApp: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string(),
|
||||
type: import_zod.z.string(),
|
||||
name: import_zod.z.string(),
|
||||
htmlUrl: import_zod.z.string(),
|
||||
apiUrl: import_zod.z.string(),
|
||||
oauthId: import_zod.z.number(),
|
||||
appId: import_zod.z.string(),
|
||||
appSecret: import_zod.z.string(),
|
||||
groupName: import_zod.z.string().optional().nullable(),
|
||||
customPort: import_zod.z.number().optional().nullable(),
|
||||
customUser: import_zod.z.string().optional().nullable()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { teamId } = ctx.user;
|
||||
let {
|
||||
id,
|
||||
type,
|
||||
name,
|
||||
htmlUrl,
|
||||
apiUrl,
|
||||
oauthId,
|
||||
appId,
|
||||
appSecret,
|
||||
groupName,
|
||||
customPort,
|
||||
customUser
|
||||
} = input;
|
||||
if (oauthId)
|
||||
oauthId = Number(oauthId);
|
||||
if (customPort)
|
||||
customPort = Number(customPort);
|
||||
const encryptedAppSecret = (0, import_common.encrypt)(appSecret);
|
||||
if (id === "new") {
|
||||
const newId = (0, import_cuid.default)();
|
||||
await import_prisma.prisma.gitSource.create({
|
||||
data: {
|
||||
id: newId,
|
||||
type,
|
||||
apiUrl,
|
||||
htmlUrl,
|
||||
name,
|
||||
customPort,
|
||||
customUser,
|
||||
teams: { connect: { id: teamId } }
|
||||
}
|
||||
});
|
||||
await import_prisma.prisma.gitlabApp.create({
|
||||
data: {
|
||||
teams: { connect: { id: teamId } },
|
||||
appId,
|
||||
oauthId,
|
||||
groupName,
|
||||
appSecret: encryptedAppSecret,
|
||||
gitSource: { connect: { id: newId } }
|
||||
}
|
||||
});
|
||||
return {
|
||||
status: 201,
|
||||
id: newId
|
||||
};
|
||||
} else {
|
||||
await import_prisma.prisma.gitSource.update({
|
||||
where: { id },
|
||||
data: { type, apiUrl, htmlUrl, name, customPort, customUser }
|
||||
});
|
||||
await import_prisma.prisma.gitlabApp.update({
|
||||
where: { id },
|
||||
data: {
|
||||
appId,
|
||||
oauthId,
|
||||
groupName,
|
||||
appSecret: encryptedAppSecret
|
||||
}
|
||||
});
|
||||
}
|
||||
}),
|
||||
delete: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).mutation(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const source = await import_prisma.prisma.gitSource.delete({
|
||||
where: { id },
|
||||
include: { githubApp: true, gitlabApp: true }
|
||||
});
|
||||
if (source.githubAppId) {
|
||||
await import_prisma.prisma.githubApp.delete({ where: { id: source.githubAppId } });
|
||||
}
|
||||
if (source.gitlabAppId) {
|
||||
await import_prisma.prisma.gitlabApp.delete({ where: { id: source.gitlabAppId } });
|
||||
}
|
||||
}),
|
||||
getSourceById: import_trpc.privateProcedure.input(
|
||||
import_zod.z.object({
|
||||
id: import_zod.z.string()
|
||||
})
|
||||
).query(async ({ input, ctx }) => {
|
||||
const { id } = input;
|
||||
const { teamId } = ctx.user;
|
||||
const settings = await import_prisma.prisma.setting.findFirst({});
|
||||
if (id === "new") {
|
||||
return {
|
||||
source: {
|
||||
name: null,
|
||||
type: null,
|
||||
htmlUrl: null,
|
||||
apiUrl: null,
|
||||
organization: null,
|
||||
customPort: 22,
|
||||
customUser: "git"
|
||||
},
|
||||
settings
|
||||
};
|
||||
}
|
||||
const source = await import_prisma.prisma.gitSource.findFirst({
|
||||
where: {
|
||||
id,
|
||||
OR: [
|
||||
{ teams: { some: { id: teamId === "0" ? void 0 : teamId } } },
|
||||
{ isSystemWide: true }
|
||||
]
|
||||
},
|
||||
include: { githubApp: true, gitlabApp: true }
|
||||
});
|
||||
if (!source) {
|
||||
throw { status: 404, message: "Source not found." };
|
||||
}
|
||||
if (source?.githubApp?.clientSecret)
|
||||
source.githubApp.clientSecret = (0, import_common.decrypt)(source.githubApp.clientSecret);
|
||||
if (source?.githubApp?.webhookSecret)
|
||||
source.githubApp.webhookSecret = (0, import_common.decrypt)(source.githubApp.webhookSecret);
|
||||
if (source?.githubApp?.privateKey)
|
||||
source.githubApp.privateKey = (0, import_common.decrypt)(source.githubApp.privateKey);
|
||||
if (source?.gitlabApp?.appSecret)
|
||||
source.gitlabApp.appSecret = (0, import_common.decrypt)(source.gitlabApp.appSecret);
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
source,
|
||||
settings
|
||||
}
|
||||
};
|
||||
})
|
||||
});
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
sourcesRouter
|
||||
});
|
||||
65
apps/trpc-experimental/server/build/trpc/trpc.js
Normal file
65
apps/trpc-experimental/server/build/trpc/trpc.js
Normal file
@@ -0,0 +1,65 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var trpc_exports = {};
|
||||
__export(trpc_exports, {
|
||||
privateProcedure: () => privateProcedure,
|
||||
publicProcedure: () => publicProcedure,
|
||||
router: () => router
|
||||
});
|
||||
module.exports = __toCommonJS(trpc_exports);
|
||||
var import_server = require("@trpc/server");
|
||||
var import_superjson = __toESM(require("superjson"));
|
||||
const t = import_server.initTRPC.context().create({
|
||||
transformer: import_superjson.default,
|
||||
errorFormatter({ shape }) {
|
||||
return shape;
|
||||
}
|
||||
});
|
||||
const logger = t.middleware(async ({ path, type, next }) => {
|
||||
const start = Date.now();
|
||||
const result = await next();
|
||||
const durationMs = Date.now() - start;
|
||||
result.ok ? console.log("OK request timing:", { path, type, durationMs }) : console.log("Non-OK request timing", { path, type, durationMs });
|
||||
return result;
|
||||
});
|
||||
const isAdmin = t.middleware(async ({ ctx, next }) => {
|
||||
if (!ctx.user) {
|
||||
throw new import_server.TRPCError({ code: "UNAUTHORIZED" });
|
||||
}
|
||||
return next({
|
||||
ctx: {
|
||||
user: ctx.user
|
||||
}
|
||||
});
|
||||
});
|
||||
const router = t.router;
|
||||
const privateProcedure = t.procedure.use(isAdmin);
|
||||
const publicProcedure = t.procedure;
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
privateProcedure,
|
||||
publicProcedure,
|
||||
router
|
||||
});
|
||||
0
apps/trpc-experimental/server/db/.gitkeep
Normal file
0
apps/trpc-experimental/server/db/.gitkeep
Normal file
BIN
apps/trpc-experimental/server/db/dev.db
Normal file
BIN
apps/trpc-experimental/server/db/dev.db
Normal file
Binary file not shown.
BIN
apps/trpc-experimental/server/db/migration.db
Normal file
BIN
apps/trpc-experimental/server/db/migration.db
Normal file
Binary file not shown.
BIN
apps/trpc-experimental/server/db/migration.db-journal
Normal file
BIN
apps/trpc-experimental/server/db/migration.db-journal
Normal file
Binary file not shown.
1013
apps/trpc-experimental/server/devTags.json
Normal file
1013
apps/trpc-experimental/server/devTags.json
Normal file
File diff suppressed because it is too large
Load Diff
3582
apps/trpc-experimental/server/devTemplates.yaml
Normal file
3582
apps/trpc-experimental/server/devTemplates.yaml
Normal file
File diff suppressed because it is too large
Load Diff
7
apps/trpc-experimental/server/nodemon.json
Normal file
7
apps/trpc-experimental/server/nodemon.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"watch": ["src"],
|
||||
"ignore": ["src/**/*.test.ts"],
|
||||
"ext": "ts,mjs,json,graphql",
|
||||
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' -o -name '*.js' \\)` --platform=node --outdir=build --format=cjs && node build",
|
||||
"legacyWatch": true
|
||||
}
|
||||
85
apps/trpc-experimental/server/package.json
Normal file
85
apps/trpc-experimental/server/package.json
Normal file
@@ -0,0 +1,85 @@
|
||||
{
|
||||
"name": "server",
|
||||
"description": "Coolify's Fastify API",
|
||||
"license": "Apache-2.0",
|
||||
"scripts": {
|
||||
"build": "rimraf ../../build && tsc --outDir ../../build",
|
||||
"dev-old": "tsx watch --clear-screen=false src",
|
||||
"dev": "nodemon",
|
||||
"lint": "prettier --plugin-search-dir . --check . && eslint .",
|
||||
"format": "prettier --plugin-search-dir . --write .",
|
||||
"test-dev": "start-server-and-test 'tsx src/server' http-get://localhost:2022 'tsx src/client'",
|
||||
"test-start": "start-server-and-test 'node dist/server' http-get://localhost:2022 'node dist/client'",
|
||||
"db:generate": "prisma generate",
|
||||
"db:push": "prisma db push && prisma generate",
|
||||
"db:seed": "prisma db seed",
|
||||
"db:studio": "prisma studio",
|
||||
"db:migrate": "DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name"
|
||||
},
|
||||
"dependencies": {
|
||||
"@breejs/ts-worker": "2.0.0",
|
||||
"@fastify/autoload": "5.6.0",
|
||||
"@fastify/cors": "8.2.0",
|
||||
"@fastify/env": "4.1.0",
|
||||
"@fastify/jwt": "6.5.0",
|
||||
"@fastify/static": "6.6.0",
|
||||
"@fastify/websocket": "7.1.1",
|
||||
"@iarna/toml": "2.2.5",
|
||||
"@ladjs/graceful": "3.0.2",
|
||||
"@prisma/client": "4.6.1",
|
||||
"@trpc/client": "10.1.0",
|
||||
"@trpc/server": "10.1.0",
|
||||
"abort-controller": "3.0.0",
|
||||
"axe": "11.0.0",
|
||||
"bcryptjs": "2.4.3",
|
||||
"bree": "9.1.2",
|
||||
"cabin": "11.0.1",
|
||||
"csvtojson": "2.0.10",
|
||||
"cuid": "2.1.8",
|
||||
"dayjs": "1.11.6",
|
||||
"dotenv": "^16.0.3",
|
||||
"execa": "6.1.0",
|
||||
"fastify": "4.10.2",
|
||||
"fastify-plugin": "4.4.0",
|
||||
"got": "^12.5.3",
|
||||
"is-ip": "5.0.0",
|
||||
"is-port-reachable": "4.0.0",
|
||||
"js-yaml": "4.1.0",
|
||||
"jsonwebtoken": "9.0.0",
|
||||
"node-fetch": "3.3.0",
|
||||
"p-all": "4.0.0",
|
||||
"p-throttle": "5.0.0",
|
||||
"prisma": "4.6.1",
|
||||
"shell-quote": "^1.7.4",
|
||||
"ssh-config": "4.1.6",
|
||||
"strip-ansi": "7.0.1",
|
||||
"superjson": "1.11.0",
|
||||
"tslib": "2.4.1",
|
||||
"unique-names-generator": "4.7.1",
|
||||
"ws": "8.11.0",
|
||||
"zod": "3.19.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bcryptjs": "^2.4.2",
|
||||
"@types/js-yaml": "^4.0.5",
|
||||
"@types/jsonwebtoken": "^8.5.9",
|
||||
"@types/node": "18.11.9",
|
||||
"@types/node-fetch": "2.6.2",
|
||||
"@types/shell-quote": "^1.7.1",
|
||||
"@types/ws": "8.5.3",
|
||||
"esbuild": "0.15.15",
|
||||
"nodemon": "2.0.20",
|
||||
"npm-run-all": "4.1.5",
|
||||
"rimraf": "3.0.2",
|
||||
"start-server-and-test": "1.14.0",
|
||||
"tsx": "3.12.1",
|
||||
"typescript": "4.9.3",
|
||||
"wait-port": "1.0.4"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "restricted"
|
||||
},
|
||||
"prisma": {
|
||||
"seed": "tsx prisma/seed.ts"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,443 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"password" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Permission" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"userId" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Permission_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
|
||||
CONSTRAINT "Permission_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT,
|
||||
FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "TeamInvitation" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"uid" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"teamName" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Application" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"repository" TEXT,
|
||||
"configHash" TEXT,
|
||||
"branch" TEXT,
|
||||
"buildPack" TEXT,
|
||||
"projectId" INTEGER,
|
||||
"port" INTEGER,
|
||||
"installCommand" TEXT,
|
||||
"buildCommand" TEXT,
|
||||
"startCommand" TEXT,
|
||||
"baseDirectory" TEXT,
|
||||
"publishDirectory" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Secret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "BuildLog" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT,
|
||||
"buildId" TEXT NOT NULL,
|
||||
"line" TEXT NOT NULL,
|
||||
"time" INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Build" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"type" TEXT NOT NULL,
|
||||
"applicationId" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
"commit" TEXT,
|
||||
"status" TEXT DEFAULT 'queued',
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DestinationDocker" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"network" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"engine" TEXT NOT NULL,
|
||||
"remoteEngine" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCoolifyProxyUsed" BOOLEAN DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GithubApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"appId" INTEGER,
|
||||
"installationId" INTEGER,
|
||||
"clientId" TEXT,
|
||||
"clientSecret" TEXT,
|
||||
"webhookSecret" TEXT,
|
||||
"privateKey" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitlabApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"oauthId" INTEGER NOT NULL,
|
||||
"groupName" TEXT,
|
||||
"deployKeyId" INTEGER,
|
||||
"privateSshKey" TEXT,
|
||||
"publicSshKey" TEXT,
|
||||
"webhookToken" TEXT,
|
||||
"appId" TEXT,
|
||||
"appSecret" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Database" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"defaultDatabase" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"dbUser" TEXT,
|
||||
"dbUserPassword" TEXT,
|
||||
"rootUser" TEXT,
|
||||
"rootUserPassword" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Database_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "PlausibleAnalytics" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT,
|
||||
"username" TEXT,
|
||||
"password" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"secretKeyBase" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Minio" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"rootUser" TEXT NOT NULL,
|
||||
"rootUserPassword" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Minio_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Vscodeserver" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"password" TEXT NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Vscodeserver_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_TeamToUser" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ApplicationToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Application" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitSourceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitSource" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GithubAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GithubApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitlabAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitlabApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DestinationDockerToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "DestinationDocker" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DatabaseToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Database" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ServiceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Service" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_id_key" ON "User"("id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Application_fqdn_key" ON "Application"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_key" ON "Secret"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GithubApp_name_key" ON "GithubApp"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_oauthId_key" ON "GitlabApp"("oauthId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_groupName_key" ON "GitlabApp"("groupName");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Minio_serviceId_key" ON "Minio"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Vscodeserver_serviceId_key" ON "Vscodeserver"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_TeamToUser_AB_unique" ON "_TeamToUser"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_TeamToUser_B_index" ON "_TeamToUser"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ApplicationToTeam_AB_unique" ON "_ApplicationToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ApplicationToTeam_B_index" ON "_ApplicationToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitSourceToTeam_AB_unique" ON "_GitSourceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitSourceToTeam_B_index" ON "_GitSourceToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GithubAppToTeam_AB_unique" ON "_GithubAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GithubAppToTeam_B_index" ON "_GithubAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitlabAppToTeam_AB_unique" ON "_GitlabAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitlabAppToTeam_B_index" ON "_GitlabAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DestinationDockerToTeam_AB_unique" ON "_DestinationDockerToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DestinationDockerToTeam_B_index" ON "_DestinationDockerToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DatabaseToTeam_AB_unique" ON "_DatabaseToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DatabaseToTeam_B_index" ON "_DatabaseToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ServiceToTeam_AB_unique" ON "_ServiceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ServiceToTeam_B_index" ON "_ServiceToTeam"("B");
|
||||
@@ -0,0 +1,28 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT
|
||||
);
|
||||
INSERT INTO "new_Team" ("createdAt", "databaseId", "id", "name", "serviceId", "updatedAt") SELECT "createdAt", "databaseId", "id", "name", "serviceId", "updatedAt" FROM "Team";
|
||||
DROP TABLE "Team";
|
||||
ALTER TABLE "new_Team" RENAME TO "Team";
|
||||
CREATE TABLE "new_DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"appendOnly" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_DatabaseSettings" ("createdAt", "databaseId", "id", "isPublic", "updatedAt") SELECT "createdAt", "databaseId", "id", "isPublic", "updatedAt" FROM "DatabaseSettings";
|
||||
DROP TABLE "DatabaseSettings";
|
||||
ALTER TABLE "new_DatabaseSettings" RENAME TO "DatabaseSettings";
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,11 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- A unique constraint covering the columns `[name,applicationId]` on the table `Secret` will be added. If there are existing duplicate values, this will fail.
|
||||
|
||||
*/
|
||||
-- DropIndex
|
||||
DROP INDEX "Secret_name_key";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_applicationId_key" ON "Secret"("name", "applicationId");
|
||||
@@ -0,0 +1,47 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
CREATE TABLE "new_ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "id", "previews", "updatedAt" FROM "ApplicationSettings";
|
||||
DROP TABLE "ApplicationSettings";
|
||||
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
CREATE TABLE "new_Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
|
||||
DROP TABLE "Service";
|
||||
ALTER TABLE "new_Service" RENAME TO "Service";
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,19 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Secret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"isPRMRSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Secret" ("applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value") SELECT "applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value" FROM "Secret";
|
||||
DROP TABLE "Secret";
|
||||
ALTER TABLE "new_Secret" RENAME TO "Secret";
|
||||
CREATE UNIQUE INDEX "Secret_name_applicationId_isPRMRSecret_key" ON "Secret"("name", "applicationId", "isPRMRSecret");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,20 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Setting" ADD COLUMN "proxyHash" TEXT;
|
||||
@@ -0,0 +1,13 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ServiceSecret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
CONSTRAINT "ServiceSecret_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ServiceSecret_name_serviceId_key" ON "ServiceSecret"("name", "serviceId");
|
||||
@@ -0,0 +1,19 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt" FROM "ApplicationSettings";
|
||||
DROP TABLE "ApplicationSettings";
|
||||
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "phpModules" TEXT;
|
||||
@@ -0,0 +1,18 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationPersistentStorage" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"path" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_key" ON "ApplicationPersistentStorage"("applicationId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationPersistentStorage_path_key" ON "ApplicationPersistentStorage"("path");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");
|
||||
@@ -0,0 +1,19 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Ghost" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"defaultEmail" TEXT NOT NULL,
|
||||
"defaultPassword" TEXT NOT NULL,
|
||||
"mariadbUser" TEXT NOT NULL,
|
||||
"mariadbPassword" TEXT NOT NULL,
|
||||
"mariadbRootUser" TEXT NOT NULL,
|
||||
"mariadbRootUserPassword" TEXT NOT NULL,
|
||||
"mariadbDatabase" TEXT,
|
||||
"mariadbPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Ghost_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Ghost_serviceId_key" ON "Ghost"("serviceId");
|
||||
@@ -0,0 +1,4 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "pythonModule" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "pythonVariable" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "pythonWSGI" TEXT;
|
||||
@@ -0,0 +1,12 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "MeiliSearch" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"masterKey" TEXT NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "MeiliSearch_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "MeiliSearch_serviceId_key" ON "MeiliSearch"("serviceId");
|
||||
@@ -0,0 +1,29 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"ftpUser" TEXT,
|
||||
"ftpPassword" TEXT,
|
||||
"ftpPublicPort" INTEGER,
|
||||
"ftpHostKey" TEXT,
|
||||
"ftpHostKeyPrivate" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
|
||||
DROP TABLE "Wordpress";
|
||||
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,5 @@
|
||||
-- DropIndex
|
||||
DROP INDEX "ApplicationPersistentStorage_path_key";
|
||||
|
||||
-- DropIndex
|
||||
DROP INDEX "ApplicationPersistentStorage_applicationId_key";
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "exposePort" INTEGER;
|
||||
@@ -0,0 +1,12 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ServicePersistentStorage" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"path" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ServicePersistentStorage_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_path_key" ON "ServicePersistentStorage"("serviceId", "path");
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "dockerFileLocation" TEXT;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "denoMainFile" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "denoOptions" TEXT;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Build" ADD COLUMN "branch" TEXT;
|
||||
@@ -0,0 +1,17 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Umami" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"umamiAdminPassword" TEXT NOT NULL,
|
||||
"hashSalt" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Umami_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Umami_serviceId_key" ON "Umami"("serviceId");
|
||||
@@ -0,0 +1,22 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"proxyHash" TEXT,
|
||||
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "baseBuildImage" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "baseImage" TEXT;
|
||||
@@ -0,0 +1,16 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Hasura" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"graphQLAdminPassword" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Hasura_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Hasura_serviceId_key" ON "Hasura"("serviceId");
|
||||
@@ -0,0 +1,25 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Fider" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"jwtSecret" TEXT NOT NULL,
|
||||
"emailNoreply" TEXT,
|
||||
"emailMailgunApiKey" TEXT,
|
||||
"emailMailgunDomain" TEXT,
|
||||
"emailMailgunRegion" TEXT,
|
||||
"emailSmtpHost" TEXT,
|
||||
"emailSmtpPort" INTEGER,
|
||||
"emailSmtpUser" TEXT,
|
||||
"emailSmtpPassword" TEXT,
|
||||
"emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");
|
||||
@@ -0,0 +1,29 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Fider" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"jwtSecret" TEXT NOT NULL,
|
||||
"emailNoreply" TEXT,
|
||||
"emailMailgunApiKey" TEXT,
|
||||
"emailMailgunDomain" TEXT,
|
||||
"emailMailgunRegion" TEXT NOT NULL DEFAULT 'EU',
|
||||
"emailSmtpHost" TEXT,
|
||||
"emailSmtpPort" INTEGER,
|
||||
"emailSmtpUser" TEXT,
|
||||
"emailSmtpPassword" TEXT,
|
||||
"emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Fider" ("createdAt", "emailMailgunApiKey", "emailMailgunDomain", "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt") SELECT "createdAt", "emailMailgunApiKey", "emailMailgunDomain", coalesce("emailMailgunRegion", 'EU') AS "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt" FROM "Fider";
|
||||
DROP TABLE "Fider";
|
||||
ALTER TABLE "new_Fider" RENAME TO "Fider";
|
||||
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,23 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"proxyHash" TEXT,
|
||||
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Service" ADD COLUMN "exposePort" INTEGER;
|
||||
@@ -0,0 +1,24 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_PlausibleAnalytics" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT,
|
||||
"username" TEXT,
|
||||
"password" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"secretKeyBase" TEXT,
|
||||
"scriptName" TEXT NOT NULL DEFAULT 'plausible.js',
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_PlausibleAnalytics" ("createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username") SELECT "createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username" FROM "PlausibleAnalytics";
|
||||
DROP TABLE "PlausibleAnalytics";
|
||||
ALTER TABLE "new_PlausibleAnalytics" RENAME TO "PlausibleAnalytics";
|
||||
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,32 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"ownMysql" BOOLEAN NOT NULL DEFAULT false,
|
||||
"mysqlHost" TEXT,
|
||||
"mysqlPort" INTEGER,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"ftpUser" TEXT,
|
||||
"ftpPassword" TEXT,
|
||||
"ftpPublicPort" INTEGER,
|
||||
"ftpHostKey" TEXT,
|
||||
"ftpHostKeyPrivate" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
|
||||
DROP TABLE "Wordpress";
|
||||
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,24 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"proxyHash" TEXT,
|
||||
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
|
||||
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Minio" ADD COLUMN "apiFqdn" TEXT;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "deploymentType" TEXT;
|
||||
@@ -0,0 +1,24 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"customPort" INTEGER NOT NULL DEFAULT 22,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
|
||||
DROP TABLE "GitSource";
|
||||
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,20 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Moodle" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"defaultUsername" TEXT NOT NULL,
|
||||
"defaultPassword" TEXT NOT NULL,
|
||||
"defaultEmail" TEXT NOT NULL,
|
||||
"mariadbUser" TEXT NOT NULL,
|
||||
"mariadbPassword" TEXT NOT NULL,
|
||||
"mariadbRootUser" TEXT NOT NULL,
|
||||
"mariadbRootUserPassword" TEXT NOT NULL,
|
||||
"mariadbDatabase" TEXT NOT NULL,
|
||||
"mariadbPublicPort" INTEGER,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Moodle_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Moodle_serviceId_key" ON "Moodle"("serviceId");
|
||||
@@ -0,0 +1,21 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_DestinationDocker" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"network" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"engine" TEXT,
|
||||
"remoteEngine" BOOLEAN NOT NULL DEFAULT false,
|
||||
"remoteIpAddress" TEXT,
|
||||
"remoteUser" TEXT,
|
||||
"remotePort" INTEGER,
|
||||
"isCoolifyProxyUsed" BOOLEAN DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_DestinationDocker" ("createdAt", "engine", "id", "isCoolifyProxyUsed", "name", "network", "remoteEngine", "updatedAt") SELECT "createdAt", "engine", "id", "isCoolifyProxyUsed", "name", "network", "remoteEngine", "updatedAt" FROM "DestinationDocker";
|
||||
DROP TABLE "DestinationDocker";
|
||||
ALTER TABLE "new_DestinationDocker" RENAME TO "DestinationDocker";
|
||||
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,33 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "SshKey" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"privateKey" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_DestinationDocker" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"network" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"engine" TEXT,
|
||||
"remoteEngine" BOOLEAN NOT NULL DEFAULT false,
|
||||
"remoteIpAddress" TEXT,
|
||||
"remoteUser" TEXT,
|
||||
"remotePort" INTEGER,
|
||||
"remoteVerified" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCoolifyProxyUsed" BOOLEAN DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"sshKeyId" TEXT,
|
||||
CONSTRAINT "DestinationDocker_sshKeyId_fkey" FOREIGN KEY ("sshKeyId") REFERENCES "SshKey" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_DestinationDocker" ("createdAt", "engine", "id", "isCoolifyProxyUsed", "name", "network", "remoteEngine", "remoteIpAddress", "remotePort", "remoteUser", "updatedAt") SELECT "createdAt", "engine", "id", "isCoolifyProxyUsed", "name", "network", "remoteEngine", "remoteIpAddress", "remotePort", "remoteUser", "updatedAt" FROM "DestinationDocker";
|
||||
DROP TABLE "DestinationDocker";
|
||||
ALTER TABLE "new_DestinationDocker" RENAME TO "DestinationDocker";
|
||||
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Setting" ADD COLUMN "ipv4" TEXT;
|
||||
ALTER TABLE "Setting" ADD COLUMN "ipv6" TEXT;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user