fix: build queue system

Andras Bacsai
2022-08-25 10:04:46 +02:00
parent f379519d40
commit 01e71958b2
18 changed files with 915 additions and 753 deletions

View File

@@ -27,7 +27,7 @@
"bcryptjs": "2.4.3",
"bree": "9.1.2",
"cabin": "9.1.2",
"compare-versions": "4.1.3",
"compare-versions": "4.1.4",
"cuid": "2.1.8",
"dayjs": "1.11.5",
"dockerode": "3.3.4",
@@ -43,17 +43,17 @@
"jsonwebtoken": "8.5.1",
"node-forge": "1.3.1",
"node-os-utils": "1.3.7",
"p-queue": "7.3.0",
"p-all": "4.0.0",
"public-ip": "6.0.1",
"ssh-config": "4.1.6",
"strip-ansi": "7.0.1",
"unique-names-generator": "4.7.1"
},
"devDependencies": {
"@types/node": "18.7.11",
"@types/node": "18.7.13",
"@types/node-os-utils": "1.3.0",
"@typescript-eslint/eslint-plugin": "5.34.0",
"@typescript-eslint/parser": "5.34.0",
"@typescript-eslint/eslint-plugin": "5.35.1",
"@typescript-eslint/parser": "5.35.1",
"esbuild": "0.15.5",
"eslint": "8.22.0",
"eslint-config-prettier": "8.5.0",

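The dependency swap above reflects the core of this change: p-queue (a long-lived, in-process queue that jobs were pushed onto via worker messages) is dropped in favor of p-all (run a fixed batch of async actions with a concurrency cap). A minimal sketch of the two styles, assuming the published APIs of both packages; the function names are illustrative, and the workers in this commit load the packages via await import() rather than static imports:

import PQueue from 'p-queue';
import pAll from 'p-all';

async function withPQueue(jobs: Array<() => Promise<unknown>>) {
    // p-queue: a persistent queue object; jobs trickle in over time.
    const queue = new PQueue({ concurrency: 1 });
    for (const job of jobs) queue.add(job);
    await queue.onIdle();
}

async function withPAll(jobs: Array<() => Promise<unknown>>) {
    // p-all: drain one batch of actions, N at a time, then return.
    await pAll(jobs, { concurrency: 1 });
}
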
View File

@@ -0,0 +1,29 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"proxyPassword" TEXT NOT NULL,
"proxyUser" TEXT NOT NULL,
"proxyHash" TEXT,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"DNSServers" TEXT,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1
);
INSERT INTO "new_Setting" ("DNSServers", "arch", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "DNSServers", "arch", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
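
The new concurrentBuilds column (default 1) is what the rewritten deployApplication job later in this diff feeds into p-all, so the number of parallel builds becomes a database-backed setting rather than a hard-coded constant. A condensed sketch of the consumer side, mirroring that job; the function name is illustrative and prisma is the shared client from lib/common:

import { prisma } from '../lib/common';

async function drainBuildQueue(actions: Array<() => Promise<void>>) {
    // Concurrency now comes from the Setting row instead of a constant.
    const { concurrentBuilds } = await prisma.setting.findFirst({});
    const pAll = await import('p-all');
    await pAll.default(actions, { concurrency: concurrentBuilds });
}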

View File

@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Build" (
"id" TEXT NOT NULL PRIMARY KEY,
"type" TEXT NOT NULL,
"applicationId" TEXT,
"destinationDockerId" TEXT,
"gitSourceId" TEXT,
"githubAppId" TEXT,
"gitlabAppId" TEXT,
"commit" TEXT,
"pullmergeRequestId" TEXT,
"forceRebuild" BOOLEAN NOT NULL DEFAULT false,
"sourceBranch" TEXT,
"branch" TEXT,
"status" TEXT DEFAULT 'queued',
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Build" ("applicationId", "branch", "commit", "createdAt", "destinationDockerId", "gitSourceId", "githubAppId", "gitlabAppId", "id", "status", "type", "updatedAt") SELECT "applicationId", "branch", "commit", "createdAt", "destinationDockerId", "gitSourceId", "githubAppId", "gitlabAppId", "id", "status", "type", "updatedAt" FROM "Build";
DROP TABLE "Build";
ALTER TABLE "new_Build" RENAME TO "Build";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -27,6 +27,7 @@ model Setting {
ipv4 String?
ipv6 String?
arch String?
concurrentBuilds Int @default(1)
}
model User {
@@ -197,6 +198,9 @@ model Build {
githubAppId String?
gitlabAppId String?
commit String?
pullmergeRequestId String?
forceRebuild Boolean @default(false)
sourceBranch String?
branch String?
status String? @default("queued")
createdAt DateTime @default(now())
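
Build rows now persist the preview context (pullmergeRequestId, sourceBranch) and the forceRebuild flag, and Setting gains concurrentBuilds. With that state stored at enqueue time, the API and webhook handlers further down no longer need to postMessage the full application payload to a worker. A condensed sketch of the write side, based on the handler hunks later in this diff; the function name, parameter list, and import path are illustrative:

import { prisma } from '../lib/common';

// Enqueue only: status defaults to 'queued', so the deployApplication job
// picks the row up on its next run.
async function enqueueBuild(
    buildId: string,
    applicationId: string,
    branch: string,
    pullmergeRequestId: string | null,
    sourceBranch: string | null,
    forceRebuild: boolean
) {
    await prisma.build.create({
        data: {
            id: buildId,
            applicationId,
            branch,
            pullmergeRequestId,
            sourceBranch,
            forceRebuild,
            type: 'webhook_commit'
        }
    });
}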

View File

@@ -5,7 +5,7 @@ import env from '@fastify/env';
import cookie from '@fastify/cookie';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
import { asyncExecShell, isDev, listSettings, prisma, version } from './lib/common';
import { asyncExecShell, asyncSleep, isDev, listSettings, prisma, version } from './lib/common';
import { scheduler } from './lib/scheduler';
import axios from 'axios';
import compareVersions from 'compare-versions';
@@ -104,14 +104,16 @@ fastify.listen({ port, host }, async (err: any, address: any) => {
}
console.log(`Coolify's API is listening on ${host}:${port}`);
await initServer();
await scheduler.start('deployApplication');
await scheduler.start('cleanupStorage');
await scheduler.start('cleanupPrismaEngines');
await scheduler.start('checkProxies');
// Check if no build is running
setInterval(async () => {
if (!scheduler.workers.has('deployApplication')) {
scheduler.run('deployApplication');
}
}, 2000)
// Check for update
// Check for update & if no build is running
setInterval(async () => {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
@@ -128,8 +130,8 @@ fastify.listen({ port, host }, async (err: any, address: any) => {
const latestVersion = versions['coolify'].main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) {
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("status:autoUpdater");
if (!scheduler.workers.has('deployApplication')) {
await scheduler.run('autoUpdater')
}
}
}
@@ -137,16 +139,11 @@ fastify.listen({ port, host }, async (err: any, address: any) => {
// Cleanup storage
setInterval(async () => {
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("status:cleanupStorage");
if (!scheduler.workers.has('deployApplication') && !scheduler.workers.has('cleanupStorage')) {
await scheduler.run('cleanupStorage')
}
}, isDev ? 5000 : 60000 * 10)
scheduler.on('worker deleted', async (name) => {
if (name === 'autoUpdater' || name === 'cleanupStorage') {
if (!scheduler.workers.has('deployApplication')) await scheduler.start('deployApplication');
}
});
await getArch();
await getIPAddress();
});
@@ -170,6 +167,12 @@ async function initServer() {
try {
await asyncExecShell(`docker network create --attachable coolify`);
} catch (error) { }
try {
const isOlder = compareVersions('3.8.1', version);
if (isOlder === -1) {
await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } });
}
} catch (error) { }
}
async function getArch() {
try {
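
The server no longer keeps a deployApplication worker alive and messages it; it polls every two seconds and respawns the job whenever none is running, and storage cleanup and auto-update only run while no deployment worker exists. The poll, condensed from the hunk above; scheduler is the Bree instance from lib/scheduler:

import { scheduler } from './lib/scheduler';

// Respawn the one-shot queue drain whenever no deployApplication worker is alive.
setInterval(async () => {
    if (!scheduler.workers.has('deployApplication')) {
        scheduler.run('deployApplication');
    }
}, 2000);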

View File

@@ -1,5 +1,5 @@
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, version } from '../lib/common';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma } from '../lib/common';
(async () => {
if (parentPort) {
@@ -9,7 +9,7 @@ import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma,
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return
if (destination.engine) enginesDone.add(destination.engine)
if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress)
let lowDiskSpace = false;
try {
let stdout = null

View File

@@ -0,0 +1,366 @@
import { parentPort } from 'node:worker_threads';
import crypto from 'crypto';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
import { createDirectories, decrypt, defaultComposeConfiguration, executeDockerCmd, getDomain, prisma } from '../lib/common';
import * as importers from '../lib/importers';
import * as buildpacks from '../lib/buildPacks';
(async () => {
if (parentPort) {
const concurrency = 1
const PQueue = await import('p-queue');
const queue = new PQueue.default({ concurrency });
parentPort.on('message', async (message) => {
if (parentPort) {
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
return;
}
if (message === 'status:autoUpdater') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'autoUpdater' });
return;
}
if (message === 'status:cleanupStorage') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'cleanupStorage' });
return;
}
if (message === 'action:flushQueue') {
queue.clear()
return;
}
await queue.add(async () => {
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
build_id: buildId,
configHash,
fqdn,
projectId,
secrets,
phpModules,
type,
pullmergeRequestId = null,
sourceBranch = null,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
forceRebuild
} = message
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = message
const currentHash = crypto
.createHash('sha256')
.update(
JSON.stringify({
pythonWSGI,
pythonModule,
pythonVariable,
deploymentType,
denoOptions,
baseImage,
baseBuildImage,
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
secrets,
branch,
repository,
fqdn
})
)
.digest('hex');
try {
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
// For previews, deploy the source branch and prefix the domain and image id with the PR/MR id
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
let deployNeeded = true;
let destinationType;
if (destinationDockerId) {
destinationType = 'docker';
}
if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
const configuration = await setDefaultConfiguration(message);
buildPack = configuration.buildPack;
port = configuration.port;
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
repodir,
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
repository,
branch,
buildId,
apiUrl: gitSource.apiUrl,
htmlUrl: gitSource.htmlUrl,
projectId,
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
forPublic: gitSource.forPublic
});
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
if (!pullmergeRequestId) {
if (configHash !== currentHash) {
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
}
} else {
deployNeeded = false;
}
} else {
deployNeeded = true;
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}`
})
imageFound = true;
} catch (error) {
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
if (buildpacks[buildPack])
await buildpacks[buildPack]({
dockerId: destinationDocker.id,
buildId,
applicationId,
domain,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
publishDirectory,
debug,
commit,
tag,
workdir,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
phpModules,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
}
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
} catch (error) {
//
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: message.build_id }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId });
} finally {
await prisma.$disconnect();
}
});
await prisma.$disconnect();
}
});
} else process.exit(0);
})();

View File

@@ -10,357 +10,353 @@ import * as buildpacks from '../lib/buildPacks';
(async () => {
if (parentPort) {
const concurrency = 1
const PQueue = await import('p-queue');
const queue = new PQueue.default({ concurrency });
parentPort.on('message', async (message) => {
if (parentPort) {
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
return;
}
if (message === 'status:autoUpdater') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'autoUpdater' });
return;
}
if (message === 'status:cleanupStorage') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'cleanupStorage' });
return;
}
if (message === 'action:flushQueue') {
queue.clear()
return;
}
await queue.add(async () => {
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
build_id: buildId,
configHash,
fqdn,
projectId,
secrets,
phpModules,
type,
pullmergeRequestId = null,
sourceBranch = null,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
forceRebuild
} = message
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = message
const currentHash = crypto
.createHash('sha256')
.update(
JSON.stringify({
pythonWSGI,
pythonModule,
pythonVariable,
deploymentType,
denoOptions,
baseImage,
baseBuildImage,
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
secrets,
branch,
repository,
fqdn
})
)
.digest('hex');
try {
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
// For previews, deploy the source branch and prefix the domain and image id with the PR/MR id
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
let deployNeeded = true;
let destinationType;
if (destinationDockerId) {
destinationType = 'docker';
}
if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
const configuration = await setDefaultConfiguration(message);
buildPack = configuration.buildPack;
port = configuration.port;
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
repodir,
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
repository,
branch,
buildId,
apiUrl: gitSource.apiUrl,
htmlUrl: gitSource.htmlUrl,
projectId,
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
forPublic: gitSource.forPublic
});
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
if (!pullmergeRequestId) {
if (configHash !== currentHash) {
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
}
} else {
deployNeeded = false;
}
} else {
deployNeeded = true;
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}`
})
imageFound = true;
} catch (error) {
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
if (buildpacks[buildPack])
await buildpacks[buildPack]({
dockerId: destinationDocker.id,
buildId,
applicationId,
domain,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
publishDirectory,
debug,
commit,
tag,
workdir,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
phpModules,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
}
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
} catch (error) {
//
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: message.build_id }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId });
} finally {
await prisma.$disconnect();
}
});
await prisma.$disconnect();
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
process.exit(0);
}
});
try {
parentPort.postMessage({ deploying: true });
const queuedBuilds = await prisma.build.findMany({ where: { status: 'queued' }, orderBy: { createdAt: 'asc' } });
const { concurrentBuilds } = await prisma.setting.findFirst({})
if (queuedBuilds.length > 0) {
const concurrency = concurrentBuilds;
const pAll = await import('p-all');
const actions = []
for (const queueBuild of queuedBuilds) {
actions.push(async () => {
const application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
const { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, forceRebuild } = queueBuild
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
configHash,
fqdn,
projectId,
secrets,
phpModules,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
} = application
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = application
const currentHash = crypto
.createHash('sha256')
.update(
JSON.stringify({
pythonWSGI,
pythonModule,
pythonVariable,
deploymentType,
denoOptions,
baseImage,
baseBuildImage,
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
secrets,
branch,
repository,
fqdn
})
)
.digest('hex');
try {
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
// For previews, deploy the source branch and prefix the domain and image id with the PR/MR id
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
let deployNeeded = true;
let destinationType;
if (destinationDockerId) {
destinationType = 'docker';
}
if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
const configuration = await setDefaultConfiguration(application);
buildPack = configuration.buildPack;
port = configuration.port;
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
repodir,
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
repository,
branch,
buildId,
apiUrl: gitSource.apiUrl,
htmlUrl: gitSource.htmlUrl,
projectId,
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
forPublic: gitSource.forPublic
});
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
if (!pullmergeRequestId) {
if (configHash !== currentHash) {
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
}
} else {
deployNeeded = false;
}
} else {
deployNeeded = true;
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}`
})
imageFound = true;
} catch (error) {
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
if (buildpacks[buildPack])
await buildpacks[buildPack]({
dockerId: destinationDocker.id,
buildId,
applicationId,
domain,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
publishDirectory,
debug,
commit,
tag,
workdir,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
phpModules,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
}
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
} catch (error) {
//
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId });
}
});
}
await pAll.default(actions, { concurrency })
}
} catch (error) {
process.exit(0);
} finally {
await prisma.$disconnect();
process.exit(0);
}
} else process.exit(0);
})();
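
Net effect of the rewrite above: the job is now a one-shot queue drain instead of a long-lived p-queue fed by postMessage. Each run announces { deploying: true }, loads every Build row still marked 'queued', maps each one to an async action that loads the application and runs the matching importer and buildpack, executes those actions through p-all at the concurrentBuilds setting, then disconnects and exits so the two-second interval in index.ts can respawn it. A condensed skeleton with the per-build work elided; all names come from the hunk above:

import { parentPort } from 'node:worker_threads';
import { prisma } from '../lib/common';

(async () => {
    if (!parentPort) process.exit(0);
    parentPort.postMessage({ deploying: true });
    try {
        const queuedBuilds = await prisma.build.findMany({
            where: { status: 'queued' },
            orderBy: { createdAt: 'asc' }
        });
        const { concurrentBuilds } = await prisma.setting.findFirst({});
        const actions = queuedBuilds.map((build) => async () => {
            // load the application for build.applicationId, run the importer
            // and buildpack, then mark the build 'success' or 'failed'
        });
        const pAll = await import('p-all');
        await pAll.default(actions, { concurrency: concurrentBuilds });
    } finally {
        await prisma.$disconnect();
        process.exit(0); // one-shot: index.ts respawns the job when needed
    }
})();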

View File

@@ -553,7 +553,7 @@ export async function buildImage({
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} ${workdir}` })
const { status } = await prisma.build.findUnique({ where: { id: buildId } })
if (status === 'canceled') {
throw new Error('Build canceled.')
throw new Error('Deployment canceled.')
}
if (isCache) {
await saveBuildLog({ line: `Building cache image successful.`, buildId, applicationId });

View File

@@ -93,7 +93,6 @@ export const asyncExecShellStream = async ({ debug, buildId, applicationId, comm
const { execaCommand } = await import('execa')
const subprocess = execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine } })
if (debug) {
await saveBuildLog({ line: `=========================`, buildId, applicationId });
subprocess.stdout.on('data', async (data) => {
const stdout = data.toString();
const array = stdout.split('\n')
@@ -123,7 +122,6 @@ export const asyncExecShellStream = async ({ debug, buildId, applicationId, comm
}
subprocess.on('exit', async (code) => {
await asyncSleep(1000);
await saveBuildLog({ line: `=========================`, buildId, applicationId });
if (code === 0) {
resolve(code)
} else {
@@ -1871,7 +1869,7 @@ export async function stopBuild(buildId, applicationId) {
let count = 0;
await new Promise<void>(async (resolve, reject) => {
const { destinationDockerId, status } = await prisma.build.findFirst({ where: { id: buildId } });
const { engine, id: dockerId } = await prisma.destinationDocker.findFirst({ where: { id: destinationDockerId } });
const { id: dockerId } = await prisma.destinationDocker.findFirst({ where: { id: destinationDockerId } });
const interval = setInterval(async () => {
try {
if (status === 'failed' || status === 'canceled') {
@@ -1881,10 +1879,10 @@ export async function stopBuild(buildId, applicationId) {
if (count > 15) {
clearInterval(interval);
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("action:flushQueue")
scheduler.workers.get('deployApplication').postMessage('cancel')
}
await cleanupDB(buildId);
return reject(new Error('Build canceled'));
await cleanupDB(buildId, applicationId);
return reject(new Error('Deployment canceled.'));
}
const { stdout: buildContainers } = await executeDockerCmd({ dockerId, command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'` })
if (buildContainers) {
@@ -1896,9 +1894,9 @@ export async function stopBuild(buildId, applicationId) {
await removeContainer({ id, dockerId });
clearInterval(interval);
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("action:flushQueue")
scheduler.workers.get('deployApplication').postMessage('cancel')
}
await cleanupDB(buildId);
await cleanupDB(buildId, applicationId);
return resolve();
}
}
@@ -1909,11 +1907,12 @@ export async function stopBuild(buildId, applicationId) {
});
}
async function cleanupDB(buildId: string) {
async function cleanupDB(buildId: string, applicationId: string) {
const data = await prisma.build.findUnique({ where: { id: buildId } });
if (data?.status === 'queued' || data?.status === 'running') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'canceled' } });
}
await saveBuildLog({ line: 'Deployment canceled.', buildId, applicationId });
}
export function convertTolOldVolumeNames(type) {
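
stopBuild now sends a single 'cancel' message (with no in-worker queue left, 'action:flushQueue' is gone), and cleanupDB takes the applicationId so the cancellation is also written to the build log. The worker-side counterpart, mirroring the rewritten deployApplication job earlier in this diff:

import { parentPort } from 'node:worker_threads';

if (parentPort) {
    parentPort.on('message', (message) => {
        if (message === 'cancel') {
            // Acknowledge and exit; the two-second poll in index.ts starts a fresh run.
            parentPort.postMessage('cancelled');
            process.exit(0);
        }
    });
}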

View File

@@ -9,27 +9,17 @@ Bree.extend(TSBree);
const options: any = {
defaultExtension: 'js',
logger: false,
logger: new Cabin(),
workerMessageHandler: async ({ name, message }) => {
if (name === 'deployApplication') {
if (message.pending === 0 && message.size === 0) {
if (message.caller === 'autoUpdater') {
if (!scheduler.workers.has('autoUpdater')) {
await scheduler.run('autoUpdater')
}
}
if (message.caller === 'cleanupStorage') {
if (!scheduler.workers.has('cleanupStorage')) {
await scheduler.run('cleanupStorage')
}
}
if (name === 'deployApplication' && message?.deploying) {
if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
scheduler.workers.get('deployApplication').postMessage('cancel')
}
}
},
jobs: [
{
name: 'deployApplication'
name: 'deployApplication',
},
{
name: 'cleanupStorage',

View File

@@ -75,7 +75,6 @@ export async function getApplicationStatus(request: FastifyRequest<OnlyId>) {
isExited = await isContainerExited(application.destinationDocker.id, id);
}
return {
isQueueActive: scheduler.workers.has('deployApplication'),
isRunning,
isExited,
};
@@ -453,6 +452,8 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
id: buildId,
applicationId: id,
branch: application.branch,
pullmergeRequestId,
forceRebuild,
destinationDockerId: application.destinationDocker?.id,
gitSourceId: application.gitSource?.id,
githubAppId: application.gitSource?.githubApp?.id,
@@ -461,24 +462,6 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
type: 'manual'
}
});
if (pullmergeRequestId) {
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'manual',
...application,
sourceBranch: branch,
pullmergeRequestId,
forceRebuild
});
} else {
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'manual',
...application,
forceRebuild
});
}
return {
buildId
};

View File

@@ -142,12 +142,6 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
type: 'webhook_commit'
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_commit',
...applicationFound
});
return {
message: 'Queued. Thank you!'
};
@@ -183,6 +177,8 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
await prisma.build.create({
data: {
id: buildId,
pullmergeRequestId,
sourceBranch,
applicationId: applicationFound.id,
destinationDockerId: applicationFound.destinationDocker.id,
gitSourceId: applicationFound.gitSource.id,
@@ -192,14 +188,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
type: 'webhook_pr'
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_pr',
...applicationFound,
sourceBranch,
pullmergeRequestId
});
return {
message: 'Queued. Thank you!'
};

View File

@@ -89,12 +89,6 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_commit',
...applicationFound
});
return {
message: 'Queued. Thank you!'
};
@@ -141,6 +135,8 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
await prisma.build.create({
data: {
id: buildId,
pullmergeRequestId,
sourceBranch,
applicationId: applicationFound.id,
destinationDockerId: applicationFound.destinationDocker.id,
gitSourceId: applicationFound.gitSource.id,
@@ -150,14 +146,6 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
type: 'webhook_mr'
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_mr',
...applicationFound,
sourceBranch,
pullmergeRequestId
});
return {
message: 'Queued. Thank you!'
};

View File

@@ -14,20 +14,20 @@
"format": "prettier --write --plugin-search-dir=. ."
},
"devDependencies": {
"@playwright/test": "1.24.2",
"@playwright/test": "1.25.1",
"@sveltejs/kit": "1.0.0-next.405",
"@types/js-cookie": "3.0.2",
"@typescript-eslint/eslint-plugin": "5.33.0",
"@typescript-eslint/parser": "5.33.0",
"@typescript-eslint/eslint-plugin": "5.35.1",
"@typescript-eslint/parser": "5.35.1",
"autoprefixer": "10.4.8",
"eslint": "8.21.0",
"eslint": "8.22.0",
"eslint-config-prettier": "8.5.0",
"eslint-plugin-svelte3": "4.0.0",
"postcss": "8.4.16",
"prettier": "2.7.1",
"prettier-plugin-svelte": "2.7.0",
"svelte": "3.49.0",
"svelte-check": "2.8.0",
"svelte-check": "2.8.1",
"svelte-preprocess": "4.10.7",
"tailwindcss": "3.1.8",
"tailwindcss-scrollbar": "0.1.0",
@@ -39,7 +39,7 @@
"dependencies": {
"@sveltejs/adapter-static": "1.0.0-next.39",
"cuid": "2.1.8",
"daisyui": "2.22.0",
"daisyui": "2.24.0",
"js-cookie": "3.0.1",
"p-limit": "4.0.0",
"svelte-select": "4.4.7",

View File

@@ -66,7 +66,6 @@
let loading = false;
let statusInterval: any;
let isQueueActive = false;
$disabledButton =
!$appSession.isAdmin ||
(!application.fqdn && !application.settings.isBot) ||
@@ -121,7 +120,6 @@
if ($status.application.loading) return;
$status.application.loading = true;
const data = await get(`/applications/${id}/status`);
isQueueActive = data.isQueueActive;
$status.application.isRunning = data.isRunning;
$status.application.isExited = data.isExited;
$status.application.loading = false;
@@ -259,13 +257,10 @@
<form on:submit|preventDefault={() => handleDeploySubmit(true)}>
<button
type="submit"
disabled={$disabledButton || !isQueueActive}
class:hover:text-green-500={isQueueActive}
disabled={$disabledButton}
class="icons bg-transparent tooltip tooltip-primary tooltip-bottom text-sm flex items-center space-x-2"
data-tip={$appSession.isAdmin
? isQueueActive
? 'Force Rebuild Application'
: 'Autoupdate inprogress. Cannot rebuild application.'
? 'Force Rebuild Application'
: 'You do not have permission to rebuild application.'}
>
<svg
@@ -287,7 +282,7 @@
</button>
</form>
{:else}
<form on:submit|preventDefault={handleDeploySubmit}>
<form on:submit|preventDefault={() => handleDeploySubmit(false)}>
<button
type="submit"
disabled={$disabledButton}
@@ -359,7 +354,7 @@
<button
disabled={$disabledButton}
class="icons bg-transparent tooltip tooltip-primary tooltip-bottom text-sm"
data-tip="Secret"
data-tip="Secrets"
>
<svg
xmlns="http://www.w3.org/2000/svg"

View File

@@ -178,7 +178,7 @@
{#if !noMoreBuilds}
{#if buildCount > 5}
<div class="flex space-x-2">
<button disabled={noMoreBuilds} class="w-full" on:click={loadMoreBuilds}
<button disabled={noMoreBuilds} class=" btn btn-sm w-full" on:click={loadMoreBuilds}
>{$t('application.build.load_more')}</button
>
</div>