Merge branch 'next' into grafana-service

Authored by Andras Bacsai on 2022-09-20 14:50:37 +02:00, committed by GitHub.
144 changed files with 4815 additions and 4349 deletions

View File

@@ -1,4 +1,4 @@
-import { base64Encode, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
+import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
 import { promises as fs } from 'fs';
 import { day } from "../dayjs";
@@ -461,17 +461,32 @@ export const saveBuildLog = async ({
 	buildId: string;
 	applicationId: string;
 }): Promise<any> => {
+	const { default: got } = await import('got')
 	if (line && typeof line === 'string' && line.includes('ghs_')) {
 		const regex = /ghs_.*@/g;
 		line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
 	}
 	const addTimestamp = `[${generateTimestamp()}] ${line}`;
-	if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
-	return await prisma.buildLog.create({
-		data: {
-			line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
-		}
-	});
+	const fluentBitUrl = isDev ? 'http://localhost:24224' : 'http://coolify-fluentbit:24224';
+	if (isDev) {
+		console.debug(`[${applicationId}] ${addTimestamp}`);
+	}
+	try {
+		return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
+			json: {
+				line: encrypt(line)
+			}
+		})
+	} catch(error) {
+		return await prisma.buildLog.create({
+			data: {
+				line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
+			}
+		});
+	}
 };
 
 export async function copyBaseConfigurationFiles(
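Build logs are now shipped to Fluent Bit over its HTTP input instead of being written straight to the database, with the old Prisma write kept as a fallback when the POST fails. A minimal sketch of that ship-or-fallback pattern, reusing the names from the diff (`isDev`, `encrypt`, `prisma`, `day`); the helper itself is hypothetical:

```ts
// Hypothetical helper isolating the pattern used by saveBuildLog above.
async function shipBuildLogLine(line: string, buildId: string, applicationId: string) {
	const { default: got } = await import('got');
	// Fluent Bit's HTTP input derives the log tag from the request path,
	// so the application and build ids are encoded into the URL.
	const fluentBitUrl = isDev ? 'http://localhost:24224' : 'http://coolify-fluentbit:24224';
	try {
		await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
			json: { line: encrypt(line) }
		});
	} catch {
		// Fluent Bit unreachable: fall back to the database so the line is not lost.
		await prisma.buildLog.create({
			data: { line, buildId, time: Number(day().valueOf()), applicationId }
		});
	}
}
```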
@@ -556,7 +571,6 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
 	);
 }
-
 export async function buildImage({
 	applicationId,
 	tag,
@@ -677,8 +691,6 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
 		secrets,
 		pullmergeRequestId
 	} = data;
-
 	const isPnpm = checkPnpm(installCommand, buildCommand);
-
 	const Dockerfile: Array<string> = [];
 	Dockerfile.push(`FROM ${imageForBuild}`);
@@ -688,7 +700,10 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {
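The same three-line change recurs in every buildpack below: rather than emitting the ARG only when the iterated secret happens to be the PR/MR-scoped one, the build now looks up a PR/MR-scoped secret of the same name and falls back to the regular value. A sketch of that lookup as a shared helper; the `Secret` shape is inferred from the fields used in the diff and the helper is hypothetical:

```ts
interface Secret {
	name: string;
	value: string;
	isBuildSecret: boolean;
	isPRMRSecret: boolean;
}

// For PR/MR builds, prefer the PR/MR-scoped secret with the same name;
// otherwise fall back to the regular secret value.
function resolveBuildArg(secret: Secret, secrets: Secret[], pullmergeRequestId?: string): string {
	if (pullmergeRequestId) {
		const prmrSecret = secrets.find((s) => s.name === secret.name && s.isPRMRSecret);
		if (prmrSecret) return prmrSecret.value;
	}
	return secret.value;
}

// Usage inside a buildpack:
// Dockerfile.push(`ARG ${secret.name}=${resolveBuildArg(secret, secrets, pullmergeRequestId)}`);
```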
@@ -722,7 +737,10 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

View File

@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

View File

@@ -28,6 +28,7 @@ export default async function (data) {
 	if (secrets.length > 0) {
 		secrets.forEach((secret) => {
 			if (secret.isBuildSecret) {
+				// TODO: fix secrets
 				if (
 					(pullmergeRequestId && secret.isPRMRSecret) ||
 					(!pullmergeRequestId && !secret.isPRMRSecret)

View File

@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

View File

@@ -23,7 +23,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

View File

@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

View File

@@ -16,7 +16,10 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

View File

@@ -21,7 +21,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

View File

@@ -24,7 +24,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 	secrets.forEach((secret) => {
 		if (secret.isBuildSecret) {
 			if (pullmergeRequestId) {
-				if (secret.isPRMRSecret) {
+				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				if (isSecretFound.length > 0) {
+					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+				} else {
 					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 				}
 			} else {

File diff suppressed because it is too large.

View File

@@ -13,7 +13,7 @@ export function formatLabelsOnDocker(data) {
 		return container
 	})
 }
-export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<boolean> {
+export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
 	let containerFound = false;
 	try {
 		const { stdout } = await executeDockerCmd({
@@ -21,10 +21,12 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 			command:
 				`docker inspect --format '{{json .State}}' ${container}`
 		});
+		containerFound = true
 		const parsedStdout = JSON.parse(stdout);
 		const status = parsedStdout.Status;
 		const isRunning = status === 'running';
+		const isRestarting = status === 'restarting'
+		const isExited = status === 'exited'
 		if (status === 'created') {
 			await executeDockerCmd({
 				dockerId,
@@ -39,13 +41,23 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 				`docker rm ${container}`
 			});
 		}
 		if (isRunning) {
 			containerFound = true;
 		}
+		return {
+			found: containerFound,
+			status: {
+				isRunning,
+				isRestarting,
+				isExited
+			}
+		};
 	} catch (err) {
 		// Container not found
 	}
-	return containerFound;
+	return {
+		found: false
+	};
 }
 
 export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
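`checkContainer` now resolves to a structured `{ found, status }` result instead of a bare boolean, so every call site needs a small migration. A sketch of an adapted caller, assuming the implementation above; the wrapper is illustrative:

```ts
// Illustrative caller: the old `if (await checkContainer(...))` test becomes
// a destructure of the richer result.
async function isUp(dockerId: string, container: string): Promise<boolean> {
	const { found, status } = await checkContainer({ dockerId, container });
	if (!found) return false;          // container does not exist at all
	return Boolean(status?.isRunning); // exists, but may be exited or restarting
}
```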

View File

@@ -10,7 +10,8 @@ export default async function ({
 	branch,
 	buildId,
 	privateSshKey,
-	customPort
+	customPort,
+	forPublic
 }: {
 	applicationId: string;
 	workdir: string;
@@ -21,11 +22,15 @@ export default async function ({
 	repodir: string;
 	privateSshKey: string;
 	customPort: number;
+	forPublic: boolean;
 }): Promise<string> {
 	const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
 	await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
-	await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
-	await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
+	if (!forPublic) {
+		await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
+		await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
+	}
 	await saveBuildLog({
 		line: `Cloning ${repository}:${branch} branch.`,
@@ -33,9 +38,16 @@ export default async function ({
 		applicationId
 	});
-	await asyncExecShell(
-		`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
-	);
+	if (forPublic) {
+		await asyncExecShell(
+			`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+		);
+	} else {
+		await asyncExecShell(
+			`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+		);
+	}
 	const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
 	return commit.replace('\n', '');
 }
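With `forPublic` set, the importer clones public GitLab repositories over HTTPS and skips writing the SSH deploy key entirely. A sketch of a call site with illustrative values; the module path and argument values are assumptions:

```ts
import importFromGitlab from './importers/gitlab'; // path assumed

// Public repository: HTTPS clone, no deploy key required.
const commit = await importFromGitlab({
	applicationId: 'app-id',
	workdir: '/tmp/build/app-id',
	repodir: '/tmp/build/',
	htmlUrl: 'https://gitlab.com',
	repository: 'group/project',
	branch: 'main',
	buildId: 'build-id',
	privateSshKey: '', // ignored when forPublic is true
	customPort: 22,    // likewise only used for the SSH path
	forPublic: true
});
```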

View File

@@ -5,6 +5,7 @@ import bcrypt from 'bcryptjs';
 import { ServiceStartStop } from '../../routes/api/v1/services/types';
 import { asyncSleep, ComposeFile, createDirectories, defaultComposeConfiguration, errorHandler, executeDockerCmd, getDomain, getFreePublicPort, getServiceFromDB, getServiceImage, getServiceMainPort, isARM, isDev, makeLabelForServices, persistentVolumes, prisma } from '../common';
 import { defaultServiceConfigurations } from '../services';
+import { OnlyId } from '../../types';
 
 export async function startService(request: FastifyRequest<ServiceStartStop>) {
 	try {
@@ -317,7 +318,7 @@ async function startMinioService(request: FastifyRequest<ServiceStartStop>) {
 		destinationDocker,
 		persistentStorage,
 		exposePort,
-		minio: { rootUser, rootUserPassword },
+		minio: { rootUser, rootUserPassword, apiFqdn },
 		serviceSecret
 	} = service;
@@ -336,7 +337,7 @@ async function startMinioService(request: FastifyRequest<ServiceStartStop>) {
 				image: `${image}:${version}`,
 				volumes: [`${id}-minio-data:/data`],
 				environmentVariables: {
-					MINIO_SERVER_URL: fqdn,
+					MINIO_SERVER_URL: apiFqdn,
 					MINIO_DOMAIN: getDomain(fqdn),
 					MINIO_ROOT_USER: rootUser,
 					MINIO_ROOT_PASSWORD: rootUserPassword,
@@ -658,7 +659,7 @@ async function startLanguageToolService(request: FastifyRequest<ServiceStartStop
 					image: config.languagetool.image,
 					environment: config.languagetool.environmentVariables,
 					...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
-					volumes: config.languagetool,
+					volumes: config.languagetool.volumes,
 					labels: makeLabelForServices('languagetool'),
 					...defaultComposeConfiguration(network),
 				}
@@ -713,7 +714,7 @@ async function startN8nService(request: FastifyRequest<ServiceStartStop>) {
 				[id]: {
 					container_name: id,
 					image: config.n8n.image,
-					volumes: config.n8n,
+					volumes: config.n8n.volumes,
 					environment: config.n8n.environmentVariables,
 					labels: makeLabelForServices('n8n'),
 					...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
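Both `volumes` fixes here and in the LanguageTool hunk above are the same bug: the entire per-service config object was passed where Compose expects the `volumes` array, which would serialize an object into the compose file. An inferred sketch of the shape being indexed (field names taken from the usages above; this interface does not exist in the diff):

```ts
// Inferred from `config.n8n.image`, `config.n8n.volumes`,
// and `config.n8n.environmentVariables` as used above.
interface ServiceTemplate {
	image: string;
	volumes: string[]; // array of "named-volume:/container/path" strings
	environmentVariables: Record<string, string | number>;
}
```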
@@ -1009,79 +1010,136 @@ async function startUmamiService(request: FastifyRequest<ServiceStartStop>) {
 	}
 	const initDbSQL = `
-	drop table if exists event;
-	drop table if exists pageview;
-	drop table if exists session;
-	drop table if exists website;
-	drop table if exists account;
-	create table account (
-		user_id serial primary key,
-		username varchar(255) unique not null,
-		password varchar(60) not null,
-		is_admin bool not null default false,
-		created_at timestamp with time zone default current_timestamp,
-		updated_at timestamp with time zone default current_timestamp
-	);
-	create table website (
-		website_id serial primary key,
-		website_uuid uuid unique not null,
-		user_id int not null references account(user_id) on delete cascade,
-		name varchar(100) not null,
-		domain varchar(500),
-		share_id varchar(64) unique,
-		created_at timestamp with time zone default current_timestamp
-	);
-	create table session (
-		session_id serial primary key,
-		session_uuid uuid unique not null,
-		website_id int not null references website(website_id) on delete cascade,
-		created_at timestamp with time zone default current_timestamp,
-		hostname varchar(100),
-		browser varchar(20),
-		os varchar(20),
-		device varchar(20),
-		screen varchar(11),
-		language varchar(35),
-		country char(2)
-	);
-	create table pageview (
-		view_id serial primary key,
-		website_id int not null references website(website_id) on delete cascade,
-		session_id int not null references session(session_id) on delete cascade,
-		created_at timestamp with time zone default current_timestamp,
-		url varchar(500) not null,
-		referrer varchar(500)
-	);
-	create table event (
-		event_id serial primary key,
-		website_id int not null references website(website_id) on delete cascade,
-		session_id int not null references session(session_id) on delete cascade,
-		created_at timestamp with time zone default current_timestamp,
-		url varchar(500) not null,
-		event_type varchar(50) not null,
-		event_value varchar(50) not null
-	);
-	create index website_user_id_idx on website(user_id);
-	create index session_created_at_idx on session(created_at);
-	create index session_website_id_idx on session(website_id);
-	create index pageview_created_at_idx on pageview(created_at);
-	create index pageview_website_id_idx on pageview(website_id);
-	create index pageview_session_id_idx on pageview(session_id);
-	create index pageview_website_id_created_at_idx on pageview(website_id, created_at);
-	create index pageview_website_id_session_id_created_at_idx on pageview(website_id, session_id, created_at);
-	create index event_created_at_idx on event(created_at);
-	create index event_website_id_idx on event(website_id);
-	create index event_session_id_idx on event(session_id);
+	-- CreateTable
+	CREATE TABLE "account" (
+		"user_id" SERIAL NOT NULL,
+		"username" VARCHAR(255) NOT NULL,
+		"password" VARCHAR(60) NOT NULL,
+		"is_admin" BOOLEAN NOT NULL DEFAULT false,
+		"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+		"updated_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+		PRIMARY KEY ("user_id")
+	);
+	-- CreateTable
+	CREATE TABLE "event" (
+		"event_id" SERIAL NOT NULL,
+		"website_id" INTEGER NOT NULL,
+		"session_id" INTEGER NOT NULL,
+		"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+		"url" VARCHAR(500) NOT NULL,
+		"event_type" VARCHAR(50) NOT NULL,
+		"event_value" VARCHAR(50) NOT NULL,
+		PRIMARY KEY ("event_id")
+	);
+	-- CreateTable
+	CREATE TABLE "pageview" (
+		"view_id" SERIAL NOT NULL,
+		"website_id" INTEGER NOT NULL,
+		"session_id" INTEGER NOT NULL,
+		"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+		"url" VARCHAR(500) NOT NULL,
+		"referrer" VARCHAR(500),
+		PRIMARY KEY ("view_id")
+	);
+	-- CreateTable
+	CREATE TABLE "session" (
+		"session_id" SERIAL NOT NULL,
+		"session_uuid" UUID NOT NULL,
+		"website_id" INTEGER NOT NULL,
+		"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+		"hostname" VARCHAR(100),
+		"browser" VARCHAR(20),
+		"os" VARCHAR(20),
+		"device" VARCHAR(20),
+		"screen" VARCHAR(11),
+		"language" VARCHAR(35),
+		"country" CHAR(2),
+		PRIMARY KEY ("session_id")
+	);
+	-- CreateTable
+	CREATE TABLE "website" (
+		"website_id" SERIAL NOT NULL,
+		"website_uuid" UUID NOT NULL,
+		"user_id" INTEGER NOT NULL,
+		"name" VARCHAR(100) NOT NULL,
+		"domain" VARCHAR(500),
+		"share_id" VARCHAR(64),
+		"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+		PRIMARY KEY ("website_id")
+	);
+	-- CreateIndex
+	CREATE UNIQUE INDEX "account.username_unique" ON "account"("username");
+	-- CreateIndex
+	CREATE INDEX "event_created_at_idx" ON "event"("created_at");
+	-- CreateIndex
+	CREATE INDEX "event_session_id_idx" ON "event"("session_id");
+	-- CreateIndex
+	CREATE INDEX "event_website_id_idx" ON "event"("website_id");
+	-- CreateIndex
+	CREATE INDEX "pageview_created_at_idx" ON "pageview"("created_at");
+	-- CreateIndex
+	CREATE INDEX "pageview_session_id_idx" ON "pageview"("session_id");
+	-- CreateIndex
+	CREATE INDEX "pageview_website_id_created_at_idx" ON "pageview"("website_id", "created_at");
+	-- CreateIndex
+	CREATE INDEX "pageview_website_id_idx" ON "pageview"("website_id");
+	-- CreateIndex
+	CREATE INDEX "pageview_website_id_session_id_created_at_idx" ON "pageview"("website_id", "session_id", "created_at");
+	-- CreateIndex
+	CREATE UNIQUE INDEX "session.session_uuid_unique" ON "session"("session_uuid");
+	-- CreateIndex
+	CREATE INDEX "session_created_at_idx" ON "session"("created_at");
+	-- CreateIndex
+	CREATE INDEX "session_website_id_idx" ON "session"("website_id");
+	-- CreateIndex
+	CREATE UNIQUE INDEX "website.website_uuid_unique" ON "website"("website_uuid");
+	-- CreateIndex
+	CREATE UNIQUE INDEX "website.share_id_unique" ON "website"("share_id");
+	-- CreateIndex
+	CREATE INDEX "website_user_id_idx" ON "website"("user_id");
+	-- AddForeignKey
+	ALTER TABLE "event" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
+	-- AddForeignKey
+	ALTER TABLE "event" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+	-- AddForeignKey
+	ALTER TABLE "pageview" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
+	-- AddForeignKey
+	ALTER TABLE "pageview" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+	-- AddForeignKey
+	ALTER TABLE "session" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+	-- AddForeignKey
+	ALTER TABLE "website" ADD FOREIGN KEY ("user_id") REFERENCES "account"("user_id") ON DELETE CASCADE ON UPDATE CASCADE;
 	insert into account (username, password, is_admin) values ('admin', '${bcrypt.hashSync(
 		umamiAdminPassword,
 		10
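The seed row interpolates a bcrypt hash straight into the migration SQL. `bcryptjs`'s `hashSync(password, 10)` returns a salted `$2a$10$…` string that Umami verifies at login; a quick round-trip check:

```ts
import bcrypt from 'bcryptjs';

const umamiAdminPassword = 'example-password';             // illustrative value
const hash = bcrypt.hashSync(umamiAdminPassword, 10);      // e.g. "$2a$10$..."
console.log(bcrypt.compareSync(umamiAdminPassword, hash)); // true
```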
@@ -1119,7 +1177,6 @@ async function startUmamiService(request: FastifyRequest<ServiceStartStop>) {
 			},
 			volumes: volumeMounts
 		};
-		console.log(composeFile)
 		const composeFileDestination = `${workdir}/docker-compose.yaml`;
 		await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
 		await startServiceContainers(destinationDocker.id, composeFileDestination)
@@ -1321,10 +1378,6 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
 	const teamId = request.user.teamId;
 	const { version, fqdn, destinationDocker, secrets, exposePort, network, port, workdir, image, appwrite } = await defaultServiceConfigurations({ id, teamId })
-	let isStatsEnabled = false
-	if (secrets.find(s => s === '_APP_USAGE_STATS=enabled')) {
-		isStatsEnabled = true
-	}
 	const {
 		opensslKeyV1,
 		executorSecret,
@@ -1702,50 +1755,48 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
 		},
 	};
-	if (isStatsEnabled) {
-		dockerCompose[id].depends_on.push(`${id}-influxdb`);
-		dockerCompose[`${id}-usage`] = {
-			image: `${image}:${version}`,
-			container_name: `${id}-usage`,
-			labels: makeLabelForServices('appwrite'),
-			entrypoint: "usage",
-			depends_on: [
-				`${id}-mariadb`,
-				`${id}-influxdb`,
-			],
-			environment: [
-				"_APP_ENV=production",
-				`_APP_OPENSSL_KEY_V1=${opensslKeyV1}`,
-				`_APP_DB_HOST=${mariadbHost}`,
-				`_APP_DB_PORT=${mariadbPort}`,
-				`_APP_DB_SCHEMA=${mariadbDatabase}`,
-				`_APP_DB_USER=${mariadbUser}`,
-				`_APP_DB_PASS=${mariadbPassword}`,
-				`_APP_INFLUXDB_HOST=${id}-influxdb`,
-				"_APP_INFLUXDB_PORT=8086",
-				`_APP_REDIS_HOST=${id}-redis`,
-				"_APP_REDIS_PORT=6379",
-				...secrets
-			],
-			...defaultComposeConfiguration(network),
-		}
-		dockerCompose[`${id}-influxdb`] = {
-			image: "appwrite/influxdb:1.5.0",
-			container_name: `${id}-influxdb`,
-			volumes: [
-				`${id}-influxdb:/var/lib/influxdb:rw`
-			],
-			...defaultComposeConfiguration(network),
-		}
-		dockerCompose[`${id}-telegraf`] = {
-			image: "appwrite/telegraf:1.4.0",
-			container_name: `${id}-telegraf`,
-			environment: [
-				`_APP_INFLUXDB_HOST=${id}-influxdb`,
-				"_APP_INFLUXDB_PORT=8086",
-			],
-			...defaultComposeConfiguration(network),
-		}
-	}
+	dockerCompose[id].depends_on.push(`${id}-influxdb`);
+	dockerCompose[`${id}-usage`] = {
+		image: `${image}:${version}`,
+		container_name: `${id}-usage`,
+		labels: makeLabelForServices('appwrite'),
+		entrypoint: "usage",
+		depends_on: [
+			`${id}-mariadb`,
+			`${id}-influxdb`,
+		],
+		environment: [
+			"_APP_ENV=production",
+			`_APP_OPENSSL_KEY_V1=${opensslKeyV1}`,
+			`_APP_DB_HOST=${mariadbHost}`,
+			`_APP_DB_PORT=${mariadbPort}`,
+			`_APP_DB_SCHEMA=${mariadbDatabase}`,
+			`_APP_DB_USER=${mariadbUser}`,
+			`_APP_DB_PASS=${mariadbPassword}`,
+			`_APP_INFLUXDB_HOST=${id}-influxdb`,
+			"_APP_INFLUXDB_PORT=8086",
+			`_APP_REDIS_HOST=${id}-redis`,
+			"_APP_REDIS_PORT=6379",
+			...secrets
+		],
+		...defaultComposeConfiguration(network),
+	}
+	dockerCompose[`${id}-influxdb`] = {
+		image: "appwrite/influxdb:1.5.0",
+		container_name: `${id}-influxdb`,
+		volumes: [
+			`${id}-influxdb:/var/lib/influxdb:rw`
+		],
+		...defaultComposeConfiguration(network),
+	}
+	dockerCompose[`${id}-telegraf`] = {
+		image: "appwrite/telegraf:1.4.0",
+		container_name: `${id}-telegraf`,
+		environment: [
+			`_APP_INFLUXDB_HOST=${id}-influxdb`,
+			"_APP_INFLUXDB_PORT=8086",
+		],
+		...defaultComposeConfiguration(network),
+	}
 
 	const composeFile: any = {
@@ -2646,4 +2697,25 @@ async function startGrafanaService(request: FastifyRequest<ServiceStartStop>) {
 	} catch ({ status, message }) {
 		return errorHandler({ status, message })
 	}
-}
+}
+
+export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
+	try {
+		const { id } = request.params
+		const teamId = request.user.teamId;
+		const {
+			destinationDockerId,
+			destinationDocker,
+		} = await getServiceFromDB({ id, teamId });
+		if (destinationDockerId) {
+			await executeDockerCmd({
+				dockerId: destinationDocker.id,
+				command: `docker exec ${id} migrate`
+			})
+			return await reply.code(201).send()
+		}
+		throw { status: 500, message: 'Could not migrate the database.' }
+	} catch ({ status, message }) {
+		return errorHandler({ status, message })
+	}
+}
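`migrateAppwriteDB` simply runs Appwrite's built-in `migrate` command inside the service container, which is what the 1.0 upgrade below relies on. A sketch of how the handler might be registered as a Fastify route; the path and plugin wiring are assumptions, not taken from this diff:

```ts
import { FastifyPluginAsync } from 'fastify';
import { migrateAppwriteDB } from '../../../lib/services/handlers'; // path assumed
import { OnlyId } from '../../../types';

const root: FastifyPluginAsync = async (fastify): Promise<void> => {
	// Hypothetical route: trigger the Appwrite database migration for a service.
	fastify.post<OnlyId>('/:id/appwrite/migrate', async (request, reply) =>
		await migrateAppwriteDB(request, reply)
	);
};

export default root;
```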

View File

@@ -1,3 +1,24 @@
+/*
+	Example of a supported version:
+	{
+		// Name used to identify the service internally
+		name: 'umami',
+		// Fancier name to show to the user
+		fancyName: 'Umami',
+		// Docker base image of the service
+		baseImage: 'ghcr.io/mikecao/umami',
+		// Optional: if there are any dependent images, list them here
+		images: [],
+		// Usable tags
+		versions: ['postgresql-latest'],
+		// Which tag is recommended
+		recommendedVersion: 'postgresql-latest',
+		// The application's default port; Umami listens on 3000
+		ports: {
+			main: 3000
+		}
+	}
+*/
 export const supportedServiceTypesAndVersions = [
 	{
 		name: 'plausibleanalytics',
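The comment documents the service-definition shape by example; as a type, the entries below all follow roughly this structure (inferred from the fields shown, not an interface that exists in the codebase):

```ts
// Inferred shape of one supportedServiceTypesAndVersions entry; illustrative only.
interface SupportedService {
	name: string;               // internal identifier, e.g. 'umami'
	fancyName: string;          // display name shown to the user
	baseImage: string;          // main Docker image of the service
	images: string[];           // dependent images, if any
	versions: string[];         // usable tags
	recommendedVersion: string; // which tag is recommended
	ports: { main: number };    // the service's default port
}
```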
@@ -116,7 +137,7 @@ export const supportedServiceTypesAndVersions = [
 	{
 		name: 'umami',
 		fancyName: 'Umami',
-		baseImage: 'ghcr.io/mikecao/umami',
+		baseImage: 'ghcr.io/umami-software/umami',
 		images: ['postgres:12-alpine'],
 		versions: ['postgresql-latest'],
 		recommendedVersion: 'postgresql-latest',
@@ -151,8 +172,8 @@ export const supportedServiceTypesAndVersions = [
 		fancyName: 'Appwrite',
 		baseImage: 'appwrite/appwrite',
 		images: ['mariadb:10.7', 'redis:6.2-alpine', 'appwrite/telegraf:1.4.0'],
-		versions: ['latest', '0.15.3'],
-		recommendedVersion: '0.15.3',
+		versions: ['latest', '1.0', '0.15.3'],
+		recommendedVersion: '1.0',
 		ports: {
 			main: 80
 		}