Merge branch 'next' into grafana-service

Andras Bacsai
2022-09-20 14:50:37 +02:00
committed by GitHub
144 changed files with 4815 additions and 4349 deletions


@@ -38,8 +38,16 @@ import * as buildpacks from '../lib/buildPacks';
for (const queueBuild of queuedBuilds) {
actions.push(async () => {
let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, forceRebuild } = queueBuild
let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild } = queueBuild
application = decryptApplication(application)
const originalApplicationId = application.id
if (pullmergeRequestId) {
const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } })
if (previewApplications.length > 0) {
previewApplicationId = previewApplications[0].id
}
}
const usableApplicationId = previewApplicationId || originalApplicationId
try {
if (queueBuild.status === 'running') {
await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
@@ -104,17 +112,17 @@ import * as buildpacks from '../lib/buildPacks';
)
.digest('hex');
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
// if (concurrency === 1) {
// await prisma.build.updateMany({
// where: {
// status: { in: ['queued', 'running'] },
// id: { not: buildId },
// applicationId,
// createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
// },
// data: { status: 'failed' }
// });
// }
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
@@ -261,7 +269,10 @@ import * as buildpacks from '../lib/buildPacks';
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
@@ -335,10 +346,15 @@ import * as buildpacks from '../lib/buildPacks';
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
@@ -350,11 +366,18 @@ import * as buildpacks from '../lib/buildPacks';
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId: application.id });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id });
}
}
});
}


@@ -29,7 +29,7 @@ async function autoUpdater() {
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
);
}
} else {


@@ -1,4 +1,4 @@
import { base64Encode, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { promises as fs } from 'fs';
import { day } from "../dayjs";
@@ -461,17 +461,32 @@ export const saveBuildLog = async ({
buildId: string;
applicationId: string;
}): Promise<any> => {
const { default: got } = await import('got')
if (line && typeof line === 'string' && line.includes('ghs_')) {
const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
}
const addTimestamp = `[${generateTimestamp()}] ${line}`;
if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
}
});
const fluentBitUrl = isDev ? 'http://localhost:24224' : 'http://coolify-fluentbit:24224';
if (isDev) {
console.debug(`[${applicationId}] ${addTimestamp}`);
}
try {
return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
json: {
line: encrypt(line)
}
})
} catch(error) {
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
}
});
}
};
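// Illustrative sketch, not part of this commit: log lines are encrypted before being shipped
// to Fluent Bit, so whichever reader later tails the generated CSV has to decrypt the stored
// field again (see getBuildIdLogs further down). Assuming the encrypt/decrypt helpers exported
// from ../common, the round trip is simply:
//
//   const stored = encrypt('yarn install finished');   // value posted to Fluent Bit
//   const line = decrypt(stored);                      // value handed back to the UI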
export async function copyBaseConfigurationFiles(
@@ -556,7 +571,6 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
);
}
export async function buildImage({
applicationId,
tag,
@@ -677,8 +691,6 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
secrets,
pullmergeRequestId
} = data;
const isPnpm = checkPnpm(installCommand, buildCommand);
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
@@ -688,7 +700,10 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
@@ -722,7 +737,10 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {


@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {


@@ -28,6 +28,7 @@ export default async function (data) {
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
// TODO: fix secrets
if (
(pullmergeRequestId && secret.isPRMRSecret) ||
(!pullmergeRequestId && !secret.isPRMRSecret)


@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {


@@ -23,7 +23,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {


@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {


@@ -16,7 +16,10 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {


@@ -21,7 +21,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {


@@ -24,7 +24,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

File diff suppressed because it is too large.


@@ -13,7 +13,7 @@ export function formatLabelsOnDocker(data) {
return container
})
}
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<boolean> {
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
let containerFound = false;
try {
const { stdout } = await executeDockerCmd({
@@ -21,10 +21,12 @@ export async function checkContainer({ dockerId, container, remove = false }: {
command:
`docker inspect --format '{{json .State}}' ${container}`
});
containerFound = true
const parsedStdout = JSON.parse(stdout);
const status = parsedStdout.Status;
const isRunning = status === 'running';
const isRestarting = status === 'restarting'
const isExited = status === 'exited'
if (status === 'created') {
await executeDockerCmd({
dockerId,
@@ -39,13 +41,23 @@ export async function checkContainer({ dockerId, container, remove = false }: {
`docker rm ${container}`
});
}
if (isRunning) {
containerFound = true;
}
return {
found: containerFound,
status: {
isRunning,
isRestarting,
isExited
}
};
} catch (err) {
// Container not found
}
return containerFound;
return {
found: false
};
}
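// Illustrative sketch, not part of this commit: with the widened return type above, callers
// no longer get a bare boolean and instead destructure the result, e.g.:
//
//   const { found, status } = await checkContainer({ dockerId, container: id });
//   if (found && status?.isRunning) {
//     // the container exists and is currently running
//   }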
export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {


@@ -10,7 +10,8 @@ export default async function ({
branch,
buildId,
privateSshKey,
customPort
customPort,
forPublic
}: {
applicationId: string;
workdir: string;
@@ -21,11 +22,15 @@ export default async function ({
repodir: string;
privateSshKey: string;
customPort: number;
forPublic: boolean;
}): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
if (!forPublic) {
await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
}
await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`,
@@ -33,9 +38,16 @@ export default async function ({
applicationId
});
await asyncExecShell(
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
);
if (forPublic) {
await asyncExecShell(
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
);
} else {
await asyncExecShell(
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
);
}
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
return commit.replace('\n', '');
}


@@ -5,6 +5,7 @@ import bcrypt from 'bcryptjs';
import { ServiceStartStop } from '../../routes/api/v1/services/types';
import { asyncSleep, ComposeFile, createDirectories, defaultComposeConfiguration, errorHandler, executeDockerCmd, getDomain, getFreePublicPort, getServiceFromDB, getServiceImage, getServiceMainPort, isARM, isDev, makeLabelForServices, persistentVolumes, prisma } from '../common';
import { defaultServiceConfigurations } from '../services';
import { OnlyId } from '../../types';
export async function startService(request: FastifyRequest<ServiceStartStop>) {
try {
@@ -317,7 +318,7 @@ async function startMinioService(request: FastifyRequest<ServiceStartStop>) {
destinationDocker,
persistentStorage,
exposePort,
minio: { rootUser, rootUserPassword },
minio: { rootUser, rootUserPassword, apiFqdn },
serviceSecret
} = service;
@@ -336,7 +337,7 @@ async function startMinioService(request: FastifyRequest<ServiceStartStop>) {
image: `${image}:${version}`,
volumes: [`${id}-minio-data:/data`],
environmentVariables: {
MINIO_SERVER_URL: fqdn,
MINIO_SERVER_URL: apiFqdn,
MINIO_DOMAIN: getDomain(fqdn),
MINIO_ROOT_USER: rootUser,
MINIO_ROOT_PASSWORD: rootUserPassword,
@@ -658,7 +659,7 @@ async function startLanguageToolService(request: FastifyRequest<ServiceStartStop
image: config.languagetool.image,
environment: config.languagetool.environmentVariables,
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
volumes: config.languagetool,
volumes: config.languagetool.volumes,
labels: makeLabelForServices('languagetool'),
...defaultComposeConfiguration(network),
}
@@ -713,7 +714,7 @@ async function startN8nService(request: FastifyRequest<ServiceStartStop>) {
[id]: {
container_name: id,
image: config.n8n.image,
volumes: config.n8n,
volumes: config.n8n.volumes,
environment: config.n8n.environmentVariables,
labels: makeLabelForServices('n8n'),
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
@@ -1009,79 +1010,136 @@ async function startUmamiService(request: FastifyRequest<ServiceStartStop>) {
}
const initDbSQL = `
drop table if exists event;
drop table if exists pageview;
drop table if exists session;
drop table if exists website;
drop table if exists account;
create table account (
user_id serial primary key,
username varchar(255) unique not null,
password varchar(60) not null,
is_admin bool not null default false,
created_at timestamp with time zone default current_timestamp,
updated_at timestamp with time zone default current_timestamp
);
create table website (
website_id serial primary key,
website_uuid uuid unique not null,
user_id int not null references account(user_id) on delete cascade,
name varchar(100) not null,
domain varchar(500),
share_id varchar(64) unique,
created_at timestamp with time zone default current_timestamp
);
create table session (
session_id serial primary key,
session_uuid uuid unique not null,
website_id int not null references website(website_id) on delete cascade,
created_at timestamp with time zone default current_timestamp,
hostname varchar(100),
browser varchar(20),
os varchar(20),
device varchar(20),
screen varchar(11),
language varchar(35),
country char(2)
);
create table pageview (
view_id serial primary key,
website_id int not null references website(website_id) on delete cascade,
session_id int not null references session(session_id) on delete cascade,
created_at timestamp with time zone default current_timestamp,
url varchar(500) not null,
referrer varchar(500)
);
create table event (
event_id serial primary key,
website_id int not null references website(website_id) on delete cascade,
session_id int not null references session(session_id) on delete cascade,
created_at timestamp with time zone default current_timestamp,
url varchar(500) not null,
event_type varchar(50) not null,
event_value varchar(50) not null
);
create index website_user_id_idx on website(user_id);
create index session_created_at_idx on session(created_at);
create index session_website_id_idx on session(website_id);
create index pageview_created_at_idx on pageview(created_at);
create index pageview_website_id_idx on pageview(website_id);
create index pageview_session_id_idx on pageview(session_id);
create index pageview_website_id_created_at_idx on pageview(website_id, created_at);
create index pageview_website_id_session_id_created_at_idx on pageview(website_id, session_id, created_at);
create index event_created_at_idx on event(created_at);
create index event_website_id_idx on event(website_id);
create index event_session_id_idx on event(session_id);
-- CreateTable
CREATE TABLE "account" (
"user_id" SERIAL NOT NULL,
"username" VARCHAR(255) NOT NULL,
"password" VARCHAR(60) NOT NULL,
"is_admin" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("user_id")
);
-- CreateTable
CREATE TABLE "event" (
"event_id" SERIAL NOT NULL,
"website_id" INTEGER NOT NULL,
"session_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"url" VARCHAR(500) NOT NULL,
"event_type" VARCHAR(50) NOT NULL,
"event_value" VARCHAR(50) NOT NULL,
PRIMARY KEY ("event_id")
);
-- CreateTable
CREATE TABLE "pageview" (
"view_id" SERIAL NOT NULL,
"website_id" INTEGER NOT NULL,
"session_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"url" VARCHAR(500) NOT NULL,
"referrer" VARCHAR(500),
PRIMARY KEY ("view_id")
);
-- CreateTable
CREATE TABLE "session" (
"session_id" SERIAL NOT NULL,
"session_uuid" UUID NOT NULL,
"website_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"hostname" VARCHAR(100),
"browser" VARCHAR(20),
"os" VARCHAR(20),
"device" VARCHAR(20),
"screen" VARCHAR(11),
"language" VARCHAR(35),
"country" CHAR(2),
PRIMARY KEY ("session_id")
);
-- CreateTable
CREATE TABLE "website" (
"website_id" SERIAL NOT NULL,
"website_uuid" UUID NOT NULL,
"user_id" INTEGER NOT NULL,
"name" VARCHAR(100) NOT NULL,
"domain" VARCHAR(500),
"share_id" VARCHAR(64),
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("website_id")
);
-- CreateIndex
CREATE UNIQUE INDEX "account.username_unique" ON "account"("username");
-- CreateIndex
CREATE INDEX "event_created_at_idx" ON "event"("created_at");
-- CreateIndex
CREATE INDEX "event_session_id_idx" ON "event"("session_id");
-- CreateIndex
CREATE INDEX "event_website_id_idx" ON "event"("website_id");
-- CreateIndex
CREATE INDEX "pageview_created_at_idx" ON "pageview"("created_at");
-- CreateIndex
CREATE INDEX "pageview_session_id_idx" ON "pageview"("session_id");
-- CreateIndex
CREATE INDEX "pageview_website_id_created_at_idx" ON "pageview"("website_id", "created_at");
-- CreateIndex
CREATE INDEX "pageview_website_id_idx" ON "pageview"("website_id");
-- CreateIndex
CREATE INDEX "pageview_website_id_session_id_created_at_idx" ON "pageview"("website_id", "session_id", "created_at");
-- CreateIndex
CREATE UNIQUE INDEX "session.session_uuid_unique" ON "session"("session_uuid");
-- CreateIndex
CREATE INDEX "session_created_at_idx" ON "session"("created_at");
-- CreateIndex
CREATE INDEX "session_website_id_idx" ON "session"("website_id");
-- CreateIndex
CREATE UNIQUE INDEX "website.website_uuid_unique" ON "website"("website_uuid");
-- CreateIndex
CREATE UNIQUE INDEX "website.share_id_unique" ON "website"("share_id");
-- CreateIndex
CREATE INDEX "website_user_id_idx" ON "website"("user_id");
-- AddForeignKey
ALTER TABLE "event" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "event" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "pageview" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "pageview" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "session" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "website" ADD FOREIGN KEY ("user_id") REFERENCES "account"("user_id") ON DELETE CASCADE ON UPDATE CASCADE;
insert into account (username, password, is_admin) values ('admin', '${bcrypt.hashSync(
umamiAdminPassword,
10
@@ -1119,7 +1177,6 @@ async function startUmamiService(request: FastifyRequest<ServiceStartStop>) {
},
volumes: volumeMounts
};
console.log(composeFile)
const composeFileDestination = `${workdir}/docker-compose.yaml`;
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
await startServiceContainers(destinationDocker.id, composeFileDestination)
@@ -1321,10 +1378,6 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
const teamId = request.user.teamId;
const { version, fqdn, destinationDocker, secrets, exposePort, network, port, workdir, image, appwrite } = await defaultServiceConfigurations({ id, teamId })
let isStatsEnabled = false
if (secrets.find(s => s === '_APP_USAGE_STATS=enabled')) {
isStatsEnabled = true
}
const {
opensslKeyV1,
executorSecret,
@@ -1702,50 +1755,48 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
},
};
if (isStatsEnabled) {
dockerCompose[id].depends_on.push(`${id}-influxdb`);
dockerCompose[`${id}-usage`] = {
image: `${image}:${version}`,
container_name: `${id}-usage`,
labels: makeLabelForServices('appwrite'),
entrypoint: "usage",
depends_on: [
`${id}-mariadb`,
`${id}-influxdb`,
],
environment: [
"_APP_ENV=production",
`_APP_OPENSSL_KEY_V1=${opensslKeyV1}`,
`_APP_DB_HOST=${mariadbHost}`,
`_APP_DB_PORT=${mariadbPort}`,
`_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`,
`_APP_INFLUXDB_HOST=${id}-influxdb`,
"_APP_INFLUXDB_PORT=8086",
`_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379",
...secrets
],
...defaultComposeConfiguration(network),
}
dockerCompose[`${id}-influxdb`] = {
image: "appwrite/influxdb:1.5.0",
container_name: `${id}-influxdb`,
volumes: [
`${id}-influxdb:/var/lib/influxdb:rw`
],
...defaultComposeConfiguration(network),
}
dockerCompose[`${id}-telegraf`] = {
image: "appwrite/telegraf:1.4.0",
container_name: `${id}-telegraf`,
environment: [
`_APP_INFLUXDB_HOST=${id}-influxdb`,
"_APP_INFLUXDB_PORT=8086",
],
...defaultComposeConfiguration(network),
}
dockerCompose[id].depends_on.push(`${id}-influxdb`);
dockerCompose[`${id}-usage`] = {
image: `${image}:${version}`,
container_name: `${id}-usage`,
labels: makeLabelForServices('appwrite'),
entrypoint: "usage",
depends_on: [
`${id}-mariadb`,
`${id}-influxdb`,
],
environment: [
"_APP_ENV=production",
`_APP_OPENSSL_KEY_V1=${opensslKeyV1}`,
`_APP_DB_HOST=${mariadbHost}`,
`_APP_DB_PORT=${mariadbPort}`,
`_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`,
`_APP_INFLUXDB_HOST=${id}-influxdb`,
"_APP_INFLUXDB_PORT=8086",
`_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379",
...secrets
],
...defaultComposeConfiguration(network),
}
dockerCompose[`${id}-influxdb`] = {
image: "appwrite/influxdb:1.5.0",
container_name: `${id}-influxdb`,
volumes: [
`${id}-influxdb:/var/lib/influxdb:rw`
],
...defaultComposeConfiguration(network),
}
dockerCompose[`${id}-telegraf`] = {
image: "appwrite/telegraf:1.4.0",
container_name: `${id}-telegraf`,
environment: [
`_APP_INFLUXDB_HOST=${id}-influxdb`,
"_APP_INFLUXDB_PORT=8086",
],
...defaultComposeConfiguration(network),
}
const composeFile: any = {
@@ -2646,4 +2697,25 @@ async function startGrafanaService(request: FastifyRequest<ServiceStartStop>) {
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
}
export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
try {
const { id } = request.params
const teamId = request.user.teamId;
const {
destinationDockerId,
destinationDocker,
} = await getServiceFromDB({ id, teamId });
if (destinationDockerId) {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker exec ${id} migrate`
})
return await reply.code(201).send()
}
throw { status: 500, message: 'Could not run migration.' }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}


@@ -1,3 +1,24 @@
/*
Example of a supported version:
{
// Name used to identify the service internally
name: 'umami',
// Fancier name to show to the user
fancyName: 'Umami',
// Docker base image for the service
baseImage: 'ghcr.io/mikecao/umami',
// Optional: If there is any dependent image, you should list it here
images: [],
// Usable tags
versions: ['postgresql-latest'],
// Which tag is the recommended
recommendedVersion: 'postgresql-latest',
// Application's default port, Umami listens on 3000
ports: {
main: 3000
}
}
*/
export const supportedServiceTypesAndVersions = [
{
name: 'plausibleanalytics',
@@ -116,7 +137,7 @@ export const supportedServiceTypesAndVersions = [
{
name: 'umami',
fancyName: 'Umami',
baseImage: 'ghcr.io/mikecao/umami',
baseImage: 'ghcr.io/umami-software/umami',
images: ['postgres:12-alpine'],
versions: ['postgresql-latest'],
recommendedVersion: 'postgresql-latest',
@@ -151,8 +172,8 @@ export const supportedServiceTypesAndVersions = [
fancyName: 'Appwrite',
baseImage: 'appwrite/appwrite',
images: ['mariadb:10.7', 'redis:6.2-alpine', 'appwrite/telegraf:1.4.0'],
versions: ['latest', '0.15.3'],
recommendedVersion: '0.15.3',
versions: ['latest', '1.0','0.15.3'],
recommendedVersion: '1.0',
ports: {
main: 80
}


@@ -5,6 +5,7 @@ import axios from 'axios';
import { FastifyReply } from 'fastify';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import csv from 'csvtojson';
import { day } from '../../../../lib/dayjs';
import { makeLabelForStandaloneApplication, setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
@@ -12,8 +13,9 @@ import { checkDomainsIsValidInDNS, checkDoubleBranch, checkExposedPort, createDi
import { checkContainer, formatLabelsOnDocker, isContainerExited, removeContainer } from '../../../../lib/docker';
import type { FastifyRequest } from 'fastify';
import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication } from './types';
import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication, RestartPreviewApplication, GetBuilds } from './types';
import { OnlyId } from '../../../../types';
import path from 'node:path';
function filterObject(obj, callback) {
return Object.fromEntries(Object.entries(obj).
@@ -74,14 +76,19 @@ export async function getApplicationStatus(request: FastifyRequest<OnlyId>) {
const { teamId } = request.user
let isRunning = false;
let isExited = false;
let isRestarting = false;
const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) {
isRunning = await checkContainer({ dockerId: application.destinationDocker.id, container: id });
isExited = await isContainerExited(application.destinationDocker.id, id);
const status = await checkContainer({ dockerId: application.destinationDocker.id, container: id });
if (status?.found) {
isRunning = status.status.isRunning;
isExited = status.status.isExited;
isRestarting = status.status.isRestarting
}
}
return {
isRunning,
isRestarting,
isExited,
};
} catch ({ status, message }) {
@@ -157,7 +164,8 @@ export async function getApplicationFromDB(id: string, teamId: string) {
gitSource: { include: { githubApp: true, gitlabApp: true } },
secrets: true,
persistentStorage: true,
connectedDatabase: true
connectedDatabase: true,
previewApplication: true
}
});
if (!application) {
@@ -339,10 +347,11 @@ export async function stopPreviewApplication(request: FastifyRequest<StopPreview
if (application?.destinationDockerId) {
const container = `${id}-${pullmergeRequestId}`
const { id: dockerId } = application.destinationDocker;
const found = await checkContainer({ dockerId, container });
const { found } = await checkContainer({ dockerId, container });
if (found) {
await removeContainer({ id: container, dockerId: application.destinationDocker.id });
}
await prisma.previewApplication.deleteMany({ where: { applicationId: application.id, pullmergeRequestId } })
}
return reply.code(201).send();
} catch ({ status, message }) {
@@ -366,7 +375,10 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
@@ -463,7 +475,7 @@ export async function stopApplication(request: FastifyRequest<OnlyId>, reply: Fa
const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) {
const { id: dockerId } = application.destinationDocker;
const found = await checkContainer({ dockerId, container: id });
const { found } = await checkContainer({ dockerId, container: id });
if (found) {
await removeContainer({ id, dockerId: application.destinationDocker.id });
}
@@ -607,7 +619,7 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
githubAppId: application.gitSource?.githubApp?.id,
gitlabAppId: application.gitSource?.gitlabApp?.id,
status: 'queued',
type: 'manual'
type: pullmergeRequestId ? application.gitSource?.githubApp?.id ? 'manual_pr' : 'manual_mr' : 'manual'
}
});
return {
@@ -798,7 +810,6 @@ export async function saveSecret(request: FastifyRequest<SaveSecret>, reply: Fas
try {
const { id } = request.params
let { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body
if (isNew) {
const found = await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } });
if (found) {
@@ -810,14 +821,24 @@ export async function saveSecret(request: FastifyRequest<SaveSecret>, reply: Fas
});
}
} else {
value = encrypt(value.trim());
if (value) {
value = encrypt(value.trim());
}
const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } });
if (found) {
await prisma.secret.updateMany({
where: { applicationId: id, name, isPRMRSecret },
data: { value, isBuildSecret, isPRMRSecret }
});
if (!value && isPRMRSecret) {
await prisma.secret.deleteMany({
where: { applicationId: id, name, isPRMRSecret }
});
} else {
await prisma.secret.updateMany({
where: { applicationId: id, name, isPRMRSecret },
data: { value, isBuildSecret, isPRMRSecret }
});
}
} else {
await prisma.secret.create({
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
@@ -884,6 +905,181 @@ export async function deleteStorage(request: FastifyRequest<DeleteStorage>) {
}
}
export async function restartPreview(request: FastifyRequest<RestartPreviewApplication>, reply: FastifyReply) {
try {
const { id, pullmergeRequestId } = request.params
const { teamId } = request.user
let application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) {
const buildId = cuid();
const { id: dockerId, network } = application.destinationDocker;
const { secrets, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
const { workdir } = await createDirectories({ repository, buildId });
const labels = []
let image = null
const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'` })
const containersArray = container.trim().split('\n');
for (const container of containersArray) {
const containerObj = formatLabelsOnDocker(container);
image = containerObj[0].Image
Object.keys(containerObj[0].Labels).forEach(function (key) {
if (key.startsWith('coolify')) {
labels.push(`${key}=${containerObj[0].Labels[key]}`)
}
})
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId,
command: `docker image inspect ${image}`
})
imageFound = true;
} catch (error) {
//
}
if (!imageFound) {
throw { status: 500, message: 'Image not found, cannot restart application.' }
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[`${applicationId}-${pullmergeRequestId}`]: {
image,
container_name: `${applicationId}-${pullmergeRequestId}`,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(network),
}
},
networks: {
[network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` })
await executeDockerCmd({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` })
await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
return reply.code(201).send();
}
throw { status: 500, message: 'Application cannot be restarted.' }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getPreviewStatus(request: FastifyRequest<RestartPreviewApplication>) {
try {
const { id, pullmergeRequestId } = request.params
const { teamId } = request.user
let isRunning = false;
let isExited = false;
let isRestarting = false;
let isBuilding = false
const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) {
const status = await checkContainer({ dockerId: application.destinationDocker.id, container: `${id}-${pullmergeRequestId}` });
if (status?.found) {
isRunning = status.status.isRunning;
isExited = status.status.isExited;
isRestarting = status.status.isRestarting
}
const building = await prisma.build.findMany({ where: { applicationId: id, pullmergeRequestId, status: { in: ['queued', 'running'] } } })
isBuilding = building.length > 0
}
return {
isBuilding,
isRunning,
isRestarting,
isExited,
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function loadPreviews(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
if (stdout === '') {
throw { status: 500, message: 'No previews found.' }
}
const containers = formatLabelsOnDocker(stdout).filter(container => container.Labels['coolify.configuration'] && container.Labels['coolify.type'] === 'standalone-application')
const jsonContainers = containers
.map((container) =>
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
)
.filter((container) => {
return container.pullmergeRequestId && container.applicationId === id;
});
for (const container of jsonContainers) {
const found = await prisma.previewApplication.findMany({ where: { applicationId: container.applicationId, pullmergeRequestId: container.pullmergeRequestId } })
if (found.length === 0) {
await prisma.previewApplication.create({
data: {
pullmergeRequestId: container.pullmergeRequestId,
sourceBranch: container.branch,
customDomain: container.fqdn,
application: { connect: { id: container.applicationId } }
}
})
}
}
return {
previews: await prisma.previewApplication.findMany({ where: { applicationId: id } })
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getPreviews(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
@@ -899,26 +1095,7 @@ export async function getPreviews(request: FastifyRequest<OnlyId>) {
const applicationSecrets = secrets.filter((secret) => !secret.isPRMRSecret);
const PRMRSecrets = secrets.filter((secret) => secret.isPRMRSecret);
const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
if (stdout === '') {
return {
containers: [],
applicationSecrets: [],
PRMRSecrets: []
}
}
const containers = formatLabelsOnDocker(stdout).filter(container => container.Labels['coolify.configuration'] && container.Labels['coolify.type'] === 'standalone-application')
const jsonContainers = containers
.map((container) =>
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
)
.filter((container) => {
return container.pullmergeRequestId && container.applicationId === id;
});
return {
containers: jsonContainers,
applicationSecrets: applicationSecrets.sort((a, b) => {
return ('' + a.name).localeCompare(b.name);
}),
@@ -970,7 +1147,7 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
return errorHandler({ status, message })
}
}
export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
export async function getBuilds(request: FastifyRequest<GetBuilds>) {
try {
const { id } = request.params
let { buildId, skip = 0 } = request.query
@@ -987,17 +1164,15 @@ export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
builds = await prisma.build.findMany({
where: { applicationId: id },
orderBy: { createdAt: 'desc' },
take: 5,
skip
take: 5 + skip
});
}
builds = builds.map((build) => {
const updatedAt = day(build.updatedAt).utc();
build.took = updatedAt.diff(day(build.createdAt)) / 1000;
build.since = updatedAt.fromNow();
return build;
});
if (build.status === 'running') {
build.elapsed = (day().utc().diff(day(build.createdAt)) / 1000).toFixed(0);
}
return build
})
return {
builds,
buildCount
@@ -1009,22 +1184,49 @@ export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
export async function getBuildIdLogs(request: FastifyRequest<GetBuildIdLogs>) {
try {
const { buildId } = request.params
// TODO: Fluentbit could still hold the logs, so we need to check if the logs are done
const { buildId, id } = request.params
let { sequence = 0 } = request.query
if (typeof sequence !== 'number') {
sequence = Number(sequence)
}
let logs = await prisma.buildLog.findMany({
where: { buildId, time: { gt: sequence } },
orderBy: { time: 'asc' }
});
let file = `/app/logs/${id}_buildlog_${buildId}.csv`
if (isDev) {
file = `${process.cwd()}/../../logs/${id}_buildlog_${buildId}.csv`
}
const data = await prisma.build.findFirst({ where: { id: buildId } });
const createdAt = day(data.createdAt).utc();
try {
await fs.stat(file)
} catch (error) {
let logs = await prisma.buildLog.findMany({
where: { buildId, time: { gt: sequence } },
orderBy: { time: 'asc' }
});
const data = await prisma.build.findFirst({ where: { id: buildId } });
const createdAt = day(data.createdAt).utc();
return {
logs: logs.map(log => {
log.time = Number(log.time)
return log
}),
fromDb: true,
took: day().diff(createdAt) / 1000,
status: data?.status || 'queued'
}
}
let fileLogs = (await fs.readFile(file)).toString()
let decryptedLogs = await csv({ noheader: true }).fromString(fileLogs)
let logs = decryptedLogs.map(log => {
const parsed = {
time: log['field1'],
line: decrypt(log['field2'] + '","' + log['field3'])
}
return parsed
}).filter(log => log.time > sequence)
return {
logs: logs.map(log => {
log.time = Number(log.time)
return log
}),
logs,
fromDb: false,
took: day().diff(createdAt) / 1000,
status: data?.status || 'queued'
}


@@ -1,8 +1,8 @@
import { FastifyPluginAsync } from 'fastify';
import { OnlyId } from '../../../../types';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildLogs, getBuildPack, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getSecrets, getStorages, getUsage, listApplications, newApplication, restartApplication, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication } from './handlers';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication } from './handlers';
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, GetImages, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => {
@@ -37,9 +37,12 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.delete<DeleteStorage>('/:id/storages', async (request) => await deleteStorage(request));
fastify.get<OnlyId>('/:id/previews', async (request) => await getPreviews(request));
fastify.post<OnlyId>('/:id/previews/load', async (request) => await loadPreviews(request));
fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));
fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
fastify.get<GetBuildLogs>('/:id/logs/build', async (request) => await getBuildLogs(request));
fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
fastify.get('/:id/usage', async (request) => await getUsage(request))


@@ -89,7 +89,7 @@ export interface GetApplicationLogs extends OnlyId {
since: number,
}
}
export interface GetBuildLogs extends OnlyId {
export interface GetBuilds extends OnlyId {
Querystring: {
buildId: string
skip: number,
@@ -97,6 +97,7 @@ export interface GetBuildLogs extends OnlyId {
}
export interface GetBuildIdLogs {
Params: {
id: string,
buildId: string
},
Querystring: {
@@ -126,4 +127,10 @@ export interface StopPreviewApplication extends OnlyId {
Body: {
pullmergeRequestId: string | null,
}
}
export interface RestartPreviewApplication {
Params: {
id: string,
pullmergeRequestId: string | null,
}
}


@@ -229,7 +229,7 @@ export async function getDestinationStatus(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const destination = await prisma.destinationDocker.findUnique({ where: { id } })
const isRunning = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy', remove: true })
const { found: isRunning } = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy', remove: true })
return {
isRunning
}


@@ -1,13 +1,23 @@
import axios from 'axios';
import { compareVersions } from 'compare-versions';
import cuid from 'cuid';
import bcrypt from 'bcryptjs';
import { asyncExecShell, asyncSleep, cleanupDockerStorage, errorHandler, isDev, listSettings, prisma, uniqueName, version } from '../../../lib/common';
import { supportedServiceTypesAndVersions } from '../../../lib/services/supportedVersions';
import type { FastifyReply, FastifyRequest } from 'fastify';
import type { Login, Update } from '.';
import type { GetCurrentUser } from './types';
import axios from "axios";
import { compareVersions } from "compare-versions";
import cuid from "cuid";
import bcrypt from "bcryptjs";
import {
asyncExecShell,
asyncSleep,
cleanupDockerStorage,
errorHandler,
isDev,
listSettings,
prisma,
uniqueName,
version,
} from "../../../lib/common";
import { supportedServiceTypesAndVersions } from "../../../lib/services/supportedVersions";
import { scheduler } from "../../../lib/scheduler";
import type { FastifyReply, FastifyRequest } from "fastify";
import type { Login, Update } from ".";
import type { GetCurrentUser } from "./types";
export async function hashPassword(password: string): Promise<string> {
const saltRounds = 15;
@@ -17,34 +27,38 @@ export async function hashPassword(password: string): Promise<string> {
export async function cleanupManually(request: FastifyRequest) {
try {
const { serverId } = request.body;
const destination = await prisma.destinationDocker.findUnique({ where: { id: serverId } })
await cleanupDockerStorage(destination.id, true, true)
return {}
const destination = await prisma.destinationDocker.findUnique({
where: { id: serverId },
});
await cleanupDockerStorage(destination.id, true, true);
return {};
} catch ({ status, message }) {
return errorHandler({ status, message })
return errorHandler({ status, message });
}
}
export async function checkUpdate(request: FastifyRequest) {
try {
const isStaging = request.hostname === 'staging.coolify.io'
const isStaging =
request.hostname === "staging.coolify.io" ||
request.hostname === "arm.coolify.io";
const currentVersion = version;
const { data: versions } = await axios.get(
`https://get.coollabs.io/versions.json?appId=${process.env['COOLIFY_APP_ID']}&version=${currentVersion}`
`https://get.coollabs.io/versions.json?appId=${process.env["COOLIFY_APP_ID"]}&version=${currentVersion}`
);
const latestVersion = versions['coolify'].main.version
const latestVersion = versions["coolify"].main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isStaging) {
return {
isUpdateAvailable: true,
latestVersion: 'next'
}
latestVersion: "next",
};
}
return {
isUpdateAvailable: isStaging ? true : isUpdateAvailable === 1,
latestVersion
latestVersion,
};
} catch ({ status, message }) {
return errorHandler({ status, message })
return errorHandler({ status, message });
}
}
@@ -59,7 +73,7 @@ export async function update(request: FastifyRequest<Update>) {
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
);
return {};
} else {
@@ -67,13 +81,27 @@ export async function update(request: FastifyRequest<Update>) {
return {};
}
} catch ({ status, message }) {
return errorHandler({ status, message })
return errorHandler({ status, message });
}
}
export async function resetQueue(request: FastifyRequest<any>) {
try {
const teamId = request.user.teamId;
if (teamId === "0") {
await prisma.build.updateMany({
where: { status: { in: ["queued", "running"] } },
data: { status: "canceled" },
});
scheduler.workers.get("deployApplication").postMessage("cancel");
}
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function restartCoolify(request: FastifyRequest<any>) {
try {
const teamId = request.user.teamId;
if (teamId === '0') {
if (teamId === "0") {
if (!isDev) {
asyncExecShell(`docker restart coolify`);
return {};
@@ -81,9 +109,12 @@ export async function restartCoolify(request: FastifyRequest<any>) {
return {};
}
}
throw { status: 500, message: 'You are not authorized to restart Coolify.' };
throw {
status: 500,
message: "You are not authorized to restart Coolify.",
};
} catch ({ status, message }) {
return errorHandler({ status, message })
return errorHandler({ status, message });
}
}
@@ -92,24 +123,24 @@ export async function showDashboard(request: FastifyRequest) {
const userId = request.user.userId;
const teamId = request.user.teamId;
const applications = await prisma.application.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true },
});
const databases = await prisma.database.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true },
});
const services = await prisma.service.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, teams: true }
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { destinationDocker: true, teams: true },
});
const gitSources = await prisma.gitSource.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { teams: true }
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { teams: true },
});
const destinations = await prisma.destinationDocker.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { teams: true }
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { teams: true },
});
const settings = await listSettings();
return {
@@ -121,88 +152,98 @@ export async function showDashboard(request: FastifyRequest) {
settings,
};
} catch ({ status, message }) {
return errorHandler({ status, message })
return errorHandler({ status, message });
}
}
export async function login(request: FastifyRequest<Login>, reply: FastifyReply) {
export async function login(
request: FastifyRequest<Login>,
reply: FastifyReply
) {
if (request.user) {
return reply.redirect('/dashboard');
return reply.redirect("/dashboard");
} else {
const { email, password, isLogin } = request.body || {};
if (!email || !password) {
throw { status: 500, message: 'Email and password are required.' };
throw { status: 500, message: "Email and password are required." };
}
const users = await prisma.user.count();
const userFound = await prisma.user.findUnique({
where: { email },
include: { teams: true, permission: true },
rejectOnNotFound: false
rejectOnNotFound: false,
});
if (!userFound && isLogin) {
throw { status: 500, message: 'User not found.' };
throw { status: 500, message: "User not found." };
}
const { isRegistrationEnabled, id } = await prisma.setting.findFirst()
const { isRegistrationEnabled, id } = await prisma.setting.findFirst();
let uid = cuid();
let permission = 'read';
let permission = "read";
let isAdmin = false;
if (users === 0) {
await prisma.setting.update({ where: { id }, data: { isRegistrationEnabled: false } });
uid = '0';
await prisma.setting.update({
where: { id },
data: { isRegistrationEnabled: false },
});
uid = "0";
}
if (userFound) {
if (userFound.type === 'email') {
if (userFound.password === 'RESETME') {
if (userFound.type === "email") {
if (userFound.password === "RESETME") {
const hashedPassword = await hashPassword(password);
if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) {
if (userFound.id === '0') {
if (userFound.id === "0") {
await prisma.user.update({
where: { email: userFound.email },
data: { password: 'RESETME' }
data: { password: "RESETME" },
});
} else {
await prisma.user.update({
where: { email: userFound.email },
data: { password: 'RESETTIMEOUT' }
data: { password: "RESETTIMEOUT" },
});
}
throw {
status: 500,
message: 'Password reset link has expired. Please request a new one.'
message:
"Password reset link has expired. Please request a new one.",
};
} else {
await prisma.user.update({
where: { email: userFound.email },
data: { password: hashedPassword }
data: { password: hashedPassword },
});
return {
userId: userFound.id,
teamId: userFound.id,
permission: userFound.permission,
isAdmin: true
isAdmin: true,
};
}
}
const passwordMatch = await bcrypt.compare(password, userFound.password);
const passwordMatch = await bcrypt.compare(
password,
userFound.password
);
if (!passwordMatch) {
throw {
status: 500,
message: 'Wrong password or email address.'
message: "Wrong password or email address.",
};
}
uid = userFound.id;
isAdmin = true;
}
} else {
permission = 'owner';
permission = "owner";
isAdmin = true;
if (!isRegistrationEnabled) {
throw {
status: 404,
message: 'Registration disabled by administrator.'
message: "Registration disabled by administrator.",
};
}
const hashedPassword = await hashPassword(password);
@@ -212,17 +253,17 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
id: uid,
email,
password: hashedPassword,
type: 'email',
type: "email",
teams: {
create: {
id: uid,
name: uniqueName(),
destinationDocker: { connect: { network: 'coolify' } }
}
destinationDocker: { connect: { network: "coolify" } },
},
},
permission: { create: { teamId: uid, permission: 'owner' } }
permission: { create: { teamId: uid, permission: "owner" } },
},
include: { teams: true }
include: { teams: true },
});
} else {
await prisma.user.create({
@@ -230,16 +271,16 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
id: uid,
email,
password: hashedPassword,
type: 'email',
type: "email",
teams: {
create: {
id: uid,
name: uniqueName()
}
name: uniqueName(),
},
},
permission: { create: { teamId: uid, permission: 'owner' } }
permission: { create: { teamId: uid, permission: "owner" } },
},
include: { teams: true }
include: { teams: true },
});
}
}
@@ -247,18 +288,21 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
userId: uid,
teamId: uid,
permission,
isAdmin
isAdmin,
};
}
}
export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fastify) {
let token = null
const { teamId } = request.query
export async function getCurrentUser(
request: FastifyRequest<GetCurrentUser>,
fastify
) {
let token = null;
const { teamId } = request.query;
try {
const user = await prisma.user.findUnique({
where: { id: request.user.userId }
})
where: { id: request.user.userId },
});
if (!user) {
throw "User not found";
}
@@ -269,20 +313,20 @@ export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fa
try {
const user = await prisma.user.findFirst({
where: { id: request.user.userId, teams: { some: { id: teamId } } },
include: { teams: true, permission: true }
})
include: { teams: true, permission: true },
});
if (user) {
const permission = user.permission.find(p => p.teamId === teamId).permission
const permission = user.permission.find(
(p) => p.teamId === teamId
).permission;
const payload = {
...request.user,
teamId,
permission: permission || null,
isAdmin: permission === 'owner' || permission === 'admin'
}
token = fastify.jwt.sign(payload)
isAdmin: permission === "owner" || permission === "admin",
};
token = fastify.jwt.sign(payload);
}
} catch (error) {
// No new token -> not switching teams
}
@@ -291,6 +335,6 @@ export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fa
settings: await prisma.setting.findFirst(),
supportedServiceTypesAndVersions,
token,
...request.user
}
...request.user,
};
}
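
The team-switching branch of getCurrentUser above derives a fresh JWT whose `isAdmin` flag follows the per-team permission. A hedged sketch of that flow in isolation; `buildTeamToken` is an illustrative name, the import path is assumed, and it presumes `@fastify/jwt` (with its type augmentation) is registered so `fastify.jwt.sign` exists.

import type { FastifyInstance } from 'fastify';
import { prisma } from '../../../lib/common'; // assumed relative path

async function buildTeamToken(fastify: FastifyInstance, userId: string, teamId: string) {
    const user = await prisma.user.findFirst({
        where: { id: userId, teams: { some: { id: teamId } } },
        include: { permission: true }
    });
    if (!user) return null; // not a member of the requested team -> keep the old token
    const permission = user.permission.find((p) => p.teamId === teamId)?.permission ?? null;
    // Requires @fastify/jwt to be registered on this Fastify instance.
    return fastify.jwt.sign({
        userId,
        teamId,
        permission,
        isAdmin: permission === 'owner' || permission === 'admin'
    });
}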

View File

@@ -1,5 +1,5 @@
import { FastifyPluginAsync } from 'fastify';
import { checkUpdate, login, showDashboard, update, showUsage, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
import { GetCurrentUser } from './types';
export interface Update {
@@ -23,9 +23,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
onRequest: [fastify.authenticate]
}, async (request) => await getCurrentUser(request, fastify));
fastify.get('/undead', {
onRequest: [fastify.authenticate]
}, async function () {
fastify.get('/undead', async function () {
return { message: 'nope' };
});
@@ -47,6 +45,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
onRequest: [fastify.authenticate]
}, async (request) => await restartCoolify(request));
fastify.post('/internal/resetQueue', {
onRequest: [fastify.authenticate]
}, async (request) => await resetQueue(request));
fastify.post('/internal/cleanup', {
onRequest: [fastify.authenticate]
}, async (request) => await cleanupManually(request));
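
The routes above mix one unauthenticated liveness endpoint (`/undead`, whose auth hook was dropped) with authenticated internal actions such as the new `/internal/resetQueue`. A minimal sketch of that registration shape; it assumes a `fastify.authenticate` decorator (and its type declaration) is provided elsewhere, and the handler body is a placeholder rather than the real `resetQueue`.

import { FastifyPluginAsync } from 'fastify';

const plugin: FastifyPluginAsync = async (fastify): Promise<void> => {
    // Public liveness probe: no onRequest hook, so it answers without a JWT.
    fastify.get('/undead', async function () {
        return { message: 'nope' };
    });
    // Internal action: gated by the authenticate decorator registered elsewhere.
    fastify.post('/internal/resetQueue', {
        onRequest: [fastify.authenticate]
    }, async () => {
        return { message: 'Queue reset requested.' }; // placeholder for resetQueue(request)
    });
};

export default plugin;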

View File

@@ -8,9 +8,7 @@ export async function listServers(request: FastifyRequest) {
try {
const userId = request.user.userId;
const teamId = request.user.teamId;
const servers = await prisma.destinationDocker.findMany({ where: { teams: { some: { id: teamId === '0' ? undefined : teamId } }, remoteEngine: false }, distinct: ['engine'] })
// const remoteServers = await prisma.destinationDocker.findMany({ where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }, distinct: ['remoteIpAddress', 'engine'] })
const servers = await prisma.destinationDocker.findMany({ where: { teams: { some: { id: teamId === '0' ? undefined : teamId } }}, distinct: ['remoteIpAddress', 'engine'] })
return {
servers
}
@@ -67,8 +65,7 @@ export async function showUsage(request: FastifyRequest) {
const { stdout: stats } = await executeSSHCmd({ dockerId: id, command: `vmstat -s` })
const { stdout: disks } = await executeSSHCmd({ dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
const { stdout: cpus } = await executeSSHCmd({ dockerId: id, command: `nproc --all` })
// const { stdout: cpuUsage } = await executeSSHCmd({ dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
// console.log(cpuUsage)
const { stdout: cpuUsage } = await executeSSHCmd({ dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
const parsed: any = parseFromText(stats)
return {
usage: {
@@ -81,8 +78,8 @@ export async function showUsage(request: FastifyRequest) {
freeMemPercentage: (parsed.totalMemoryKB - parsed.usedMemoryKB) / parsed.totalMemoryKB * 100
},
cpu: {
load: 0,
usage: 0,
load: [0,0,0],
usage: cpuUsage,
count: cpus
},
disk: {

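The usage numbers above come from plain shell output gathered over SSH (`vmstat -s`, `df -m ...`, `nproc --all`). As an example of the parsing involved, a small sketch for the `df` line, which arrives as a single whitespace-separated string such as "230367 23007 10%"; `parseDiskUsage` is an illustrative helper, not part of this commit.

// Parses the output of: df -m / --output=size,used,pcent | grep -v 'Used' | xargs
function parseDiskUsage(disks: string) {
    const [size, used, pcent] = disks.trim().split(/\s+/);
    return {
        totalMB: Number(size),
        usedMB: Number(used),
        usedPercentage: Number(pcent.replace('%', ''))
    };
}

// parseDiskUsage('230367 23007 10%') -> { totalMB: 230367, usedMB: 23007, usedPercentage: 10 }
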
View File

@@ -43,13 +43,17 @@ export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
let isRunning = false;
let isExited = false
let isRestarting = false;
const service = await getServiceFromDB({ id, teamId });
const { destinationDockerId, settings } = service;
if (destinationDockerId) {
isRunning = await checkContainer({ dockerId: service.destinationDocker.id, container: id });
isExited = await isContainerExited(service.destinationDocker.id, id);
const status = await checkContainer({ dockerId: service.destinationDocker.id, container: id });
if (status?.found) {
isRunning = status.status.isRunning;
isExited = status.status.isExited;
isRestarting = status.status.isRestarting
}
}
return {
isRunning,
@@ -452,7 +456,7 @@ export async function activatePlausibleUsers(request: FastifyRequest<OnlyId>, re
if (destinationDockerId) {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker exec ${id} 'psql -H postgresql://${postgresqlUser}:${postgresqlPassword}@localhost:5432/${postgresqlDatabase} -c "UPDATE users SET email_verified = true;"'`
command: `docker exec ${id}-postgresql psql -H postgresql://${postgresqlUser}:${postgresqlPassword}@localhost:5432/${postgresqlDatabase} -c "UPDATE users SET email_verified = true;"`
})
return await reply.code(201).send()
}
@@ -472,7 +476,7 @@ export async function cleanupPlausibleLogs(request: FastifyRequest<OnlyId>, repl
if (destinationDockerId) {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker exec ${id}-clickhouse sh -c "/usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\""`
command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`
})
return await reply.code(201).send()
}
@@ -554,7 +558,7 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
});
try {
const isRunning = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
const { found: isRunning } = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
if (isRunning) {
await executeDockerCmd({
dockerId: destinationDocker.id,

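The service handlers above rely on `checkContainer` now resolving to `{ found, status }` instead of a bare boolean, so callers either destructure `found` (the WordPress FTP case) or read the nested flags. A hedged sketch of a consumer of that shape; the `ContainerCheck` type is inferred from the call sites for illustration only.

type ContainerCheck = {
    found: boolean;
    status?: { isRunning: boolean; isExited: boolean; isRestarting: boolean };
};

function toServiceState(check: ContainerCheck) {
    // A missing container (or missing status) maps to "everything false", as above.
    if (!check?.found || !check.status) {
        return { isRunning: false, isExited: false, isRestarting: false };
    }
    const { isRunning, isExited, isRestarting } = check.status;
    return { isRunning, isExited, isRestarting };
}
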
View File

@@ -30,7 +30,7 @@ import {
import type { OnlyId } from '../../../../types';
import type { ActivateWordpressFtp, CheckService, CheckServiceDomain, DeleteServiceSecret, DeleteServiceStorage, GetServiceLogs, SaveService, SaveServiceDestination, SaveServiceSecret, SaveServiceSettings, SaveServiceStorage, SaveServiceType, SaveServiceVersion, ServiceStartStop, SetGlitchTipSettings, SetWordpressSettings } from './types';
import { startService, stopService } from '../../../../lib/services/handlers';
import { migrateAppwriteDB, startService, stopService } from '../../../../lib/services/handlers';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => {
@@ -76,6 +76,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.post<OnlyId>('/:id/plausibleanalytics/activate', async (request, reply) => await activatePlausibleUsers(request, reply));
fastify.post<OnlyId>('/:id/plausibleanalytics/cleanup', async (request, reply) => await cleanupPlausibleLogs(request, reply));
fastify.post<ActivateWordpressFtp>('/:id/wordpress/ftp', async (request, reply) => await activateWordpressFtp(request, reply));
fastify.post<OnlyId>('/:id/appwrite/migrate', async (request, reply) => await migrateAppwriteDB(request, reply));
};
export default root;

View File

@@ -1,7 +1,7 @@
import axios from "axios";
import cuid from "cuid";
import crypto from "crypto";
import { encrypt, errorHandler, getUIUrl, isDev, prisma } from "../../../lib/common";
import { encrypt, errorHandler, getDomain, getUIUrl, isDev, prisma } from "../../../lib/common";
import { checkContainer, removeContainer } from "../../../lib/docker";
import { createdBranchDatabase, getApplicationFromDBWebhook, removeBranchDatabase } from "../../api/v1/applications/handlers";
@@ -154,7 +154,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
if (application.settings.previews) {
if (application.destinationDockerId) {
const isRunning = await checkContainer(
const { found: isRunning } = await checkContainer(
{
dockerId: application.destinationDocker.id,
container: application.id
@@ -169,10 +169,29 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
pullmergeRequestAction === 'reopened' ||
pullmergeRequestAction === 'synchronize'
) {
await prisma.application.update({
where: { id: application.id },
data: { updatedAt: new Date() }
});
let previewApplicationId = undefined
if (pullmergeRequestId) {
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
if (foundPreviewApplications.length > 0) {
previewApplicationId = foundPreviewApplications[0].id
} else {
const protocol = application.fqdn.includes('https://') ? 'https://' : 'http://'
const previewApplication = await prisma.previewApplication.create({
data: {
pullmergeRequestId,
sourceBranch,
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
application: { connect: { id: application.id } }
}
})
previewApplicationId = previewApplication.id
}
}
// if (application.connectedDatabase && pullmergeRequestAction === 'opened' || pullmergeRequestAction === 'reopened') {
// // Coolify hosted database
// if (application.connectedDatabase.databaseId) {
@@ -187,6 +206,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
data: {
id: buildId,
pullmergeRequestId,
previewApplicationId,
sourceBranch,
applicationId: application.id,
destinationDockerId: application.destinationDocker.id,
@@ -198,7 +218,9 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
}
});
return {
message: 'Queued. Thank you!'
};
} else if (pullmergeRequestAction === 'closed') {
if (application.destinationDockerId) {
const id = `${application.id}-${pullmergeRequestId}`;
@@ -206,13 +228,22 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
await removeContainer({ id, dockerId: application.destinationDocker.id });
} catch (error) { }
}
if (application.connectedDatabase.databaseId) {
const databaseId = application.connectedDatabase.databaseId;
const database = await prisma.database.findUnique({ where: { id: databaseId } });
if (database) {
await removeBranchDatabase(database, pullmergeRequestId);
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
if (foundPreviewApplications.length > 0) {
for (const preview of foundPreviewApplications) {
await prisma.previewApplication.delete({ where: { id: preview.id } })
}
}
return {
message: 'PR closed. Thank you!'
};
// if (application?.connectedDatabase?.databaseId) {
// const databaseId = application.connectedDatabase.databaseId;
// const database = await prisma.database.findUnique({ where: { id: databaseId } });
// if (database) {
// await removeBranchDatabase(database, pullmergeRequestId);
// }
// }
}
}
}
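
The GitHub handler above introduces a find-or-create step: one `PreviewApplication` row per `(applicationId, pullmergeRequestId)`, with a PR-scoped subdomain built from the application's FQDN. A hedged sketch of that step on its own; `findOrCreatePreview` is an illustrative name, not part of this commit, and the import path is assumed.

import { getDomain, prisma } from '../../../lib/common';

async function findOrCreatePreview(
    application: { id: string; fqdn: string },
    pullmergeRequestId: string,
    sourceBranch: string
) {
    const existing = await prisma.previewApplication.findMany({
        where: { applicationId: application.id, pullmergeRequestId }
    });
    if (existing.length > 0) return existing[0].id;
    const protocol = application.fqdn.includes('https://') ? 'https://' : 'http://';
    const preview = await prisma.previewApplication.create({
        data: {
            pullmergeRequestId,
            sourceBranch,
            customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
            application: { connect: { id: application.id } }
        }
    });
    return preview.id;
}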

View File

@@ -2,7 +2,7 @@ import axios from "axios";
import cuid from "cuid";
import crypto from "crypto";
import type { FastifyReply, FastifyRequest } from "fastify";
import { errorHandler, getAPIUrl, getUIUrl, isDev, listSettings, prisma } from "../../../lib/common";
import { errorHandler, getAPIUrl, getDomain, getUIUrl, isDev, listSettings, prisma } from "../../../lib/common";
import { checkContainer, removeContainer } from "../../../lib/docker";
import { getApplicationFromDB, getApplicationFromDBWebhook } from "../../api/v1/applications/handlers";
@@ -91,8 +91,8 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
}
}
} else if (objectKind === 'merge_request') {
const { object_attributes: { work_in_progress: isDraft, action, source_branch: sourceBranch, target_branch: targetBranch, iid: pullmergeRequestId }, project: { id } } = request.body
const { object_attributes: { work_in_progress: isDraft, action, source_branch: sourceBranch, target_branch: targetBranch }, project: { id } } = request.body
const pullmergeRequestId = request.body.object_attributes.iid.toString();
const projectId = Number(id);
if (!allowedActions.includes(action)) {
throw { status: 500, message: 'Action not allowed.' }
@@ -107,7 +107,7 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
const buildId = cuid();
if (application.settings.previews) {
if (application.destinationDockerId) {
const isRunning = await checkContainer(
const { found: isRunning } = await checkContainer(
{
dockerId: application.destinationDocker.id,
container: application.id
@@ -130,10 +130,29 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
where: { id: application.id },
data: { updatedAt: new Date() }
});
let previewApplicationId = undefined
if (pullmergeRequestId) {
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
if (foundPreviewApplications.length > 0) {
previewApplicationId = foundPreviewApplications[0].id
} else {
const protocol = application.fqdn.includes('https://') ? 'https://' : 'http://'
const previewApplication = await prisma.previewApplication.create({
data: {
pullmergeRequestId,
sourceBranch,
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
application: { connect: { id: application.id } }
}
})
previewApplicationId = previewApplication.id
}
}
await prisma.build.create({
data: {
id: buildId,
pullmergeRequestId: pullmergeRequestId.toString(),
pullmergeRequestId,
previewApplicationId,
sourceBranch,
applicationId: application.id,
destinationDockerId: application.destinationDocker.id,
@@ -150,8 +169,19 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
} else if (action === 'close') {
if (application.destinationDockerId) {
const id = `${application.id}-${pullmergeRequestId}`;
await removeContainer({ id, dockerId: application.destinationDocker.id });
try {
await removeContainer({ id, dockerId: application.destinationDocker.id });
} catch (error) { }
}
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
if (foundPreviewApplications.length > 0) {
for (const preview of foundPreviewApplications) {
await prisma.previewApplication.delete({ where: { id: preview.id } })
}
}
return {
message: 'MR closed. Thank you!'
};
}
}
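
Both webhook handlers share the same close/cleanup path: remove the `{applicationId}-{pullmergeRequestId}` container (ignoring failures if it is already gone) and delete every matching `PreviewApplication` row. A hedged sketch of that path extracted into one function; `cleanupPreview` is an illustrative name, not part of this commit, and the import paths are assumed.

import { prisma } from '../../../lib/common';
import { removeContainer } from '../../../lib/docker';

async function cleanupPreview(
    application: { id: string; destinationDocker: { id: string } },
    pullmergeRequestId: string
) {
    const id = `${application.id}-${pullmergeRequestId}`;
    try {
        await removeContainer({ id, dockerId: application.destinationDocker.id });
    } catch (error) {
        // The container may already be gone; cleanup of the DB rows continues regardless.
    }
    const previews = await prisma.previewApplication.findMany({
        where: { applicationId: application.id, pullmergeRequestId }
    });
    for (const preview of previews) {
        await prisma.previewApplication.delete({ where: { id: preview.id } });
    }
}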

View File

@@ -12,7 +12,7 @@ function configureMiddleware(
if (isHttps) {
traefik.http.routers[id] = {
entrypoints: ['web'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`,
middlewares: ['redirect-to-https']
};
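
Every router rule in this file now appends `PathPrefix(\`/\`)`, presumably so these catch-all routers keep a comparable rule length and priority next to path-scoped routers. A small sketch of the rule construction only; `buildRule` is an illustrative helper and the priority rationale is an assumption, not stated by the commit.

function buildRule(nakedDomain: string, includeWWW: boolean): string {
    // Matches the pattern used above: optional www alternative, always PathPrefix(`/`).
    const host = includeWWW
        ? `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`))`
        : `Host(\`${nakedDomain}\`)`;
    return `${host} && PathPrefix(\`/\`)`;
}

// buildRule('example.com', true)
// -> "(Host(`example.com`) || Host(`www.example.com`)) && PathPrefix(`/`)"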
@@ -53,7 +53,7 @@ function configureMiddleware(
if (isDualCerts) {
traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`,
tls: {
certresolver: 'letsencrypt'
@@ -64,7 +64,7 @@ function configureMiddleware(
if (isWWW) {
traefik.http.routers[`${id}-secure-www`] = {
entrypoints: ['websecure'],
rule: `Host(\`www.${nakedDomain}\`)`,
rule: `Host(\`www.${nakedDomain}\`) && PathPrefix(\`/\`)`,
service: `${id}`,
tls: {
certresolver: 'letsencrypt'
@@ -73,7 +73,7 @@ function configureMiddleware(
};
traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'],
rule: `Host(\`${nakedDomain}\`)`,
rule: `Host(\`${nakedDomain}\`) && PathPrefix(\`/\`)`,
service: `${id}`,
tls: {
domains: {
@@ -86,7 +86,7 @@ function configureMiddleware(
} else {
traefik.http.routers[`${id}-secure-www`] = {
entrypoints: ['websecure'],
rule: `Host(\`www.${nakedDomain}\`)`,
rule: `Host(\`www.${nakedDomain}\`) && PathPrefix(\`/\`)`,
service: `${id}`,
tls: {
domains: {
@@ -97,7 +97,7 @@ function configureMiddleware(
};
traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'],
rule: `Host(\`${domain}\`)`,
rule: `Host(\`${domain}\`) && PathPrefix(\`/\`)`,
service: `${id}`,
tls: {
certresolver: 'letsencrypt'
@@ -110,14 +110,14 @@ function configureMiddleware(
} else {
traefik.http.routers[id] = {
entrypoints: ['web'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`,
middlewares: []
};
traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`,
tls: {
domains: {