fix: build queue system

Andras Bacsai
2022-08-25 10:04:46 +02:00
parent f379519d40
commit 01e71958b2
18 changed files with 915 additions and 753 deletions

View File

@@ -5,7 +5,7 @@ import env from '@fastify/env';
import cookie from '@fastify/cookie';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
import { asyncExecShell, isDev, listSettings, prisma, version } from './lib/common';
import { asyncExecShell, asyncSleep, isDev, listSettings, prisma, version } from './lib/common';
import { scheduler } from './lib/scheduler';
import axios from 'axios';
import compareVersions from 'compare-versions';
@@ -104,14 +104,16 @@ fastify.listen({ port, host }, async (err: any, address: any) => {
}
console.log(`Coolify's API is listening on ${host}:${port}`);
await initServer();
await scheduler.start('deployApplication');
await scheduler.start('cleanupStorage');
await scheduler.start('cleanupPrismaEngines');
await scheduler.start('checkProxies');
// Check if no build is running
setInterval(async () => {
if (!scheduler.workers.has('deployApplication')) {
scheduler.run('deployApplication');
}
}, 2000)
// Check for update
// Check for update & if no build is running
setInterval(async () => {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
@@ -128,8 +130,8 @@ fastify.listen({ port, host }, async (err: any, address: any) => {
const latestVersion = versions['coolify'].main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) {
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("status:autoUpdater");
if (!scheduler.workers.has('deployApplication')) {
await scheduler.run('autoUpdater')
}
}
}
@@ -137,16 +139,11 @@ fastify.listen({ port, host }, async (err: any, address: any) => {
// Cleanup storage
setInterval(async () => {
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("status:cleanupStorage");
if (!scheduler.workers.has('deployApplication') && !scheduler.workers.has('cleanupStorage')) {
await scheduler.run('cleanupStorage')
}
}, isDev ? 5000 : 60000 * 10)
scheduler.on('worker deleted', async (name) => {
if (name === 'autoUpdater' || name === 'cleanupStorage') {
if (!scheduler.workers.has('deployApplication')) await scheduler.start('deployApplication');
}
});
await getArch();
await getIPAddress();
});
@@ -170,6 +167,12 @@ async function initServer() {
try {
await asyncExecShell(`docker network create --attachable coolify`);
} catch (error) { }
try {
const isOlder = compareVersions('3.8.1', version);
if (isOlder === -1) {
await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } });
}
} catch (error) { }
}
async function getArch() {
try {

View File

@@ -1,5 +1,5 @@
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, version } from '../lib/common';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma } from '../lib/common';
(async () => {
if (parentPort) {
@@ -9,7 +9,7 @@ import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma,
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return
if (destination.engine) enginesDone.add(destination.engine)
if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress)
let lowDiskSpace = false;
try {
let stdout = null

View File

@@ -0,0 +1,366 @@
import { parentPort } from 'node:worker_threads';
import crypto from 'crypto';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
import { createDirectories, decrypt, defaultComposeConfiguration, executeDockerCmd, getDomain, prisma } from '../lib/common';
import * as importers from '../lib/importers';
import * as buildpacks from '../lib/buildPacks';
(async () => {
if (parentPort) {
const concurrency = 1
const PQueue = await import('p-queue');
const queue = new PQueue.default({ concurrency });
parentPort.on('message', async (message) => {
if (parentPort) {
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
return;
}
if (message === 'status:autoUpdater') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'autoUpdater' });
return;
}
if (message === 'status:cleanupStorage') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'cleanupStorage' });
return;
}
if (message === 'action:flushQueue') {
queue.clear()
return;
}
await queue.add(async () => {
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
build_id: buildId,
configHash,
fqdn,
projectId,
secrets,
phpModules,
type,
pullmergeRequestId = null,
sourceBranch = null,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
forceRebuild
} = message
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = message
const currentHash = crypto
.createHash('sha256')
.update(
JSON.stringify({
pythonWSGI,
pythonModule,
pythonVariable,
deploymentType,
denoOptions,
baseImage,
baseBuildImage,
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
secrets,
branch,
repository,
fqdn
})
)
.digest('hex');
try {
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
// Previews, we need to get the source branch and set subdomain
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
let deployNeeded = true;
let destinationType;
if (destinationDockerId) {
destinationType = 'docker';
}
if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
const configuration = await setDefaultConfiguration(message);
buildPack = configuration.buildPack;
port = configuration.port;
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
repodir,
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
repository,
branch,
buildId,
apiUrl: gitSource.apiUrl,
htmlUrl: gitSource.htmlUrl,
projectId,
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
forPublic: gitSource.forPublic
});
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
if (!pullmergeRequestId) {
if (configHash !== currentHash) {
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
}
} else {
deployNeeded = false;
}
} else {
deployNeeded = true;
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}`
})
imageFound = true;
} catch (error) {
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
if (buildpacks[buildPack])
await buildpacks[buildPack]({
dockerId: destinationDocker.id,
buildId,
applicationId,
domain,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
publishDirectory,
debug,
commit,
tag,
workdir,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
phpModules,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
}
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
} catch (error) {
//
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: message.build_id }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId });
} finally {
await prisma.$disconnect();
}
});
await prisma.$disconnect();
}
});
} else process.exit(0);
})();

View File

@@ -10,357 +10,353 @@ import * as buildpacks from '../lib/buildPacks';
(async () => {
if (parentPort) {
const concurrency = 1
const PQueue = await import('p-queue');
const queue = new PQueue.default({ concurrency });
parentPort.on('message', async (message) => {
if (parentPort) {
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
return;
}
if (message === 'status:autoUpdater') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'autoUpdater' });
return;
}
if (message === 'status:cleanupStorage') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'cleanupStorage' });
return;
}
if (message === 'action:flushQueue') {
queue.clear()
return;
}
await queue.add(async () => {
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
build_id: buildId,
configHash,
fqdn,
projectId,
secrets,
phpModules,
type,
pullmergeRequestId = null,
sourceBranch = null,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
forceRebuild
} = message
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = message
const currentHash = crypto
.createHash('sha256')
.update(
JSON.stringify({
pythonWSGI,
pythonModule,
pythonVariable,
deploymentType,
denoOptions,
baseImage,
baseBuildImage,
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
secrets,
branch,
repository,
fqdn
})
)
.digest('hex');
try {
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
// Previews, we need to get the source branch and set subdomain
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
let deployNeeded = true;
let destinationType;
if (destinationDockerId) {
destinationType = 'docker';
}
if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
const configuration = await setDefaultConfiguration(message);
buildPack = configuration.buildPack;
port = configuration.port;
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
repodir,
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
repository,
branch,
buildId,
apiUrl: gitSource.apiUrl,
htmlUrl: gitSource.htmlUrl,
projectId,
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
forPublic: gitSource.forPublic
});
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
if (!pullmergeRequestId) {
if (configHash !== currentHash) {
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
}
} else {
deployNeeded = false;
}
} else {
deployNeeded = true;
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}`
})
imageFound = true;
} catch (error) {
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
if (buildpacks[buildPack])
await buildpacks[buildPack]({
dockerId: destinationDocker.id,
buildId,
applicationId,
domain,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
publishDirectory,
debug,
commit,
tag,
workdir,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
phpModules,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
}
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
} catch (error) {
//
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: message.build_id }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: message.build_id, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId });
} finally {
await prisma.$disconnect();
}
});
await prisma.$disconnect();
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
process.exit(0);
}
});
try {
parentPort.postMessage({ deploying: true });
const queuedBuilds = await prisma.build.findMany({ where: { status: 'queued' }, orderBy: { createdAt: 'asc' } });
const { concurrentBuilds } = await prisma.setting.findFirst({})
if (queuedBuilds.length > 0) {
const concurrency = concurrentBuilds;
const pAll = await import('p-all');
const actions = []
for (const queueBuild of queuedBuilds) {
actions.push(async () => {
const application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
const { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, forceRebuild } = queueBuild
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
configHash,
fqdn,
projectId,
secrets,
phpModules,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
} = application
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = application
const currentHash = crypto
.createHash('sha256')
.update(
JSON.stringify({
pythonWSGI,
pythonModule,
pythonVariable,
deploymentType,
denoOptions,
baseImage,
baseBuildImage,
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
secrets,
branch,
repository,
fqdn
})
)
.digest('hex');
try {
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
// Previews, we need to get the source branch and set subdomain
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
let deployNeeded = true;
let destinationType;
if (destinationDockerId) {
destinationType = 'docker';
}
if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
const configuration = await setDefaultConfiguration(application);
buildPack = configuration.buildPack;
port = configuration.port;
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
repodir,
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
repository,
branch,
buildId,
apiUrl: gitSource.apiUrl,
htmlUrl: gitSource.htmlUrl,
projectId,
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
forPublic: gitSource.forPublic
});
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
if (!pullmergeRequestId) {
if (configHash !== currentHash) {
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
}
} else {
deployNeeded = false;
}
} else {
deployNeeded = true;
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}`
})
imageFound = true;
} catch (error) {
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
if (buildpacks[buildPack])
await buildpacks[buildPack]({
dockerId: destinationDocker.id,
buildId,
applicationId,
domain,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
publishDirectory,
debug,
commit,
tag,
workdir,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
phpModules,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
}
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
} catch (error) {
//
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId });
}
});
}
await pAll.default(actions, { concurrency })
}
} catch (error) {
process.exit(0);
} finally {
await prisma.$disconnect();
process.exit(0);
}
} else process.exit(0);
})();

View File

@@ -553,7 +553,7 @@ export async function buildImage({
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} ${workdir}` })
const { status } = await prisma.build.findUnique({ where: { id: buildId } })
if (status === 'canceled') {
throw new Error('Build canceled.')
throw new Error('Deployment canceled.')
}
if (isCache) {
await saveBuildLog({ line: `Building cache image successful.`, buildId, applicationId });

View File

@@ -93,7 +93,6 @@ export const asyncExecShellStream = async ({ debug, buildId, applicationId, comm
const { execaCommand } = await import('execa')
const subprocess = execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine } })
if (debug) {
await saveBuildLog({ line: `=========================`, buildId, applicationId });
subprocess.stdout.on('data', async (data) => {
const stdout = data.toString();
const array = stdout.split('\n')
@@ -123,7 +122,6 @@ export const asyncExecShellStream = async ({ debug, buildId, applicationId, comm
}
subprocess.on('exit', async (code) => {
await asyncSleep(1000);
await saveBuildLog({ line: `=========================`, buildId, applicationId });
if (code === 0) {
resolve(code)
} else {
@@ -1871,7 +1869,7 @@ export async function stopBuild(buildId, applicationId) {
let count = 0;
await new Promise<void>(async (resolve, reject) => {
const { destinationDockerId, status } = await prisma.build.findFirst({ where: { id: buildId } });
const { engine, id: dockerId } = await prisma.destinationDocker.findFirst({ where: { id: destinationDockerId } });
const { id: dockerId } = await prisma.destinationDocker.findFirst({ where: { id: destinationDockerId } });
const interval = setInterval(async () => {
try {
if (status === 'failed' || status === 'canceled') {
@@ -1881,10 +1879,10 @@ export async function stopBuild(buildId, applicationId) {
if (count > 15) {
clearInterval(interval);
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("action:flushQueue")
scheduler.workers.get('deployApplication').postMessage('cancel')
}
await cleanupDB(buildId);
return reject(new Error('Build canceled'));
await cleanupDB(buildId, applicationId);
return reject(new Error('Deployment canceled.'));
}
const { stdout: buildContainers } = await executeDockerCmd({ dockerId, command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'` })
if (buildContainers) {
@@ -1896,9 +1894,9 @@ export async function stopBuild(buildId, applicationId) {
await removeContainer({ id, dockerId });
clearInterval(interval);
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("action:flushQueue")
scheduler.workers.get('deployApplication').postMessage('cancel')
}
await cleanupDB(buildId);
await cleanupDB(buildId, applicationId);
return resolve();
}
}
@@ -1909,11 +1907,12 @@ export async function stopBuild(buildId, applicationId) {
});
}
async function cleanupDB(buildId: string) {
async function cleanupDB(buildId: string, applicationId: string) {
const data = await prisma.build.findUnique({ where: { id: buildId } });
if (data?.status === 'queued' || data?.status === 'running') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'canceled' } });
}
await saveBuildLog({ line: 'Deployment canceled.', buildId, applicationId });
}
export function convertTolOldVolumeNames(type) {

View File

@@ -9,27 +9,17 @@ Bree.extend(TSBree);
const options: any = {
defaultExtension: 'js',
logger: false,
logger: new Cabin(),
workerMessageHandler: async ({ name, message }) => {
if (name === 'deployApplication') {
if (message.pending === 0 && message.size === 0) {
if (message.caller === 'autoUpdater') {
if (!scheduler.workers.has('autoUpdater')) {
await scheduler.run('autoUpdater')
}
}
if (message.caller === 'cleanupStorage') {
if (!scheduler.workers.has('cleanupStorage')) {
await scheduler.run('cleanupStorage')
}
}
if (name === 'deployApplication' && message?.deploying) {
if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
scheduler.workers.get('deployApplication').postMessage('cancel')
}
}
},
jobs: [
{
name: 'deployApplication'
name: 'deployApplication',
},
{
name: 'cleanupStorage',

View File

@@ -75,7 +75,6 @@ export async function getApplicationStatus(request: FastifyRequest<OnlyId>) {
isExited = await isContainerExited(application.destinationDocker.id, id);
}
return {
isQueueActive: scheduler.workers.has('deployApplication'),
isRunning,
isExited,
};
@@ -453,6 +452,8 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
id: buildId,
applicationId: id,
branch: application.branch,
pullmergeRequestId,
forceRebuild,
destinationDockerId: application.destinationDocker?.id,
gitSourceId: application.gitSource?.id,
githubAppId: application.gitSource?.githubApp?.id,
@@ -461,24 +462,6 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
type: 'manual'
}
});
if (pullmergeRequestId) {
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'manual',
...application,
sourceBranch: branch,
pullmergeRequestId,
forceRebuild
});
} else {
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'manual',
...application,
forceRebuild
});
}
return {
buildId
};

View File

@@ -142,12 +142,6 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
type: 'webhook_commit'
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_commit',
...applicationFound
});
return {
message: 'Queued. Thank you!'
};
@@ -183,6 +177,8 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
await prisma.build.create({
data: {
id: buildId,
pullmergeRequestId,
sourceBranch,
applicationId: applicationFound.id,
destinationDockerId: applicationFound.destinationDocker.id,
gitSourceId: applicationFound.gitSource.id,
@@ -192,14 +188,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
type: 'webhook_pr'
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_pr',
...applicationFound,
sourceBranch,
pullmergeRequestId
});
return {
message: 'Queued. Thank you!'
};

View File

@@ -89,12 +89,6 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_commit',
...applicationFound
});
return {
message: 'Queued. Thank you!'
};
@@ -141,6 +135,8 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
await prisma.build.create({
data: {
id: buildId,
pullmergeRequestId,
sourceBranch,
applicationId: applicationFound.id,
destinationDockerId: applicationFound.destinationDocker.id,
gitSourceId: applicationFound.gitSource.id,
@@ -150,14 +146,6 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
type: 'webhook_mr'
}
});
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'webhook_mr',
...applicationFound,
sourceBranch,
pullmergeRequestId
});
return {
message: 'Queued. Thank you!'
};