wip: trpc
This commit is contained in:
@@ -14,17 +14,16 @@ import {
|
||||
import {
|
||||
createDirectories,
|
||||
decrypt,
|
||||
defaultComposeConfiguration,
|
||||
getDomain,
|
||||
prisma,
|
||||
generateSecrets,
|
||||
decryptApplication,
|
||||
isDev,
|
||||
pushToRegistry,
|
||||
executeCommand,
|
||||
generateSecrets
|
||||
pushToRegistry
|
||||
} from '../lib/common';
|
||||
import * as importers from '../lib/importers';
|
||||
import * as buildpacks from '../lib/buildPacks';
|
||||
import { prisma } from '../prisma';
|
||||
import { executeCommand } from '../lib/executeCommand';
|
||||
import { defaultComposeConfiguration } from '../lib/docker';
|
||||
|
||||
(async () => {
|
||||
if (parentPort) {
|
||||
@@ -532,6 +531,48 @@ import * as buildpacks from '../lib/buildPacks';
|
||||
});
|
||||
if (forceRebuild) deployNeeded = true;
|
||||
if ((!imageFoundLocally && !imageFoundRemotely) || deployNeeded) {
|
||||
if (buildPack === 'static') {
|
||||
await buildpacks.staticApp({
|
||||
dockerId: destinationDocker.id,
|
||||
network: destinationDocker.network,
|
||||
buildId,
|
||||
applicationId,
|
||||
domain,
|
||||
name,
|
||||
type,
|
||||
volumes,
|
||||
labels,
|
||||
pullmergeRequestId,
|
||||
buildPack,
|
||||
repository,
|
||||
branch,
|
||||
projectId,
|
||||
publishDirectory,
|
||||
debug,
|
||||
commit,
|
||||
tag,
|
||||
workdir,
|
||||
port: exposePort ? `${exposePort}:${port}` : port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
phpModules,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
dockerFileLocation,
|
||||
dockerComposeConfiguration,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType,
|
||||
forceRebuild
|
||||
});
|
||||
}
|
||||
if (buildpacks[buildPack])
|
||||
await buildpacks[buildPack]({
|
||||
dockerId: destinationDocker.id,
|
||||
@@ -803,5 +844,8 @@ import * as buildpacks from '../lib/buildPacks';
|
||||
while (true) {
|
||||
await th();
|
||||
}
|
||||
} else process.exit(0);
|
||||
} else {
|
||||
console.log('hello');
|
||||
process.exit(0);
|
||||
}
|
||||
})();
|
||||
@@ -1,9 +0,0 @@
|
||||
import { parentPort } from 'node:worker_threads';
|
||||
import process from 'node:process';
|
||||
|
||||
console.log('Hello TypeScript!');
|
||||
|
||||
// signal to parent that the job is done
|
||||
if (parentPort) parentPort.postMessage('done');
|
||||
// eslint-disable-next-line unicorn/no-process-exit
|
||||
else process.exit(0);
|
||||
843
apps/server/src/lib/buildPacks/common.ts
Normal file
843
apps/server/src/lib/buildPacks/common.ts
Normal file
@@ -0,0 +1,843 @@
|
||||
import {
|
||||
base64Encode,
|
||||
decrypt,
|
||||
encrypt,
|
||||
generateSecrets,
|
||||
generateTimestamp,
|
||||
getDomain,
|
||||
isARM,
|
||||
isDev,
|
||||
version
|
||||
} from '../common';
|
||||
import { promises as fs } from 'fs';
|
||||
import { day } from '../dayjs';
|
||||
import { prisma } from '../../prisma';
|
||||
import { executeCommand } from '../executeCommand';
|
||||
|
||||
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
|
||||
const nodeBased = [
|
||||
'react',
|
||||
'preact',
|
||||
'vuejs',
|
||||
'svelte',
|
||||
'gatsby',
|
||||
'astro',
|
||||
'eleventy',
|
||||
'node',
|
||||
'nestjs',
|
||||
'nuxtjs',
|
||||
'nextjs'
|
||||
];
|
||||
|
||||
export function setDefaultBaseImage(
|
||||
buildPack: string | null,
|
||||
deploymentType: string | null = null
|
||||
) {
|
||||
const nodeVersions = [
|
||||
{
|
||||
value: 'node:lts',
|
||||
label: 'node:lts'
|
||||
},
|
||||
{
|
||||
value: 'node:18',
|
||||
label: 'node:18'
|
||||
},
|
||||
{
|
||||
value: 'node:17',
|
||||
label: 'node:17'
|
||||
},
|
||||
{
|
||||
value: 'node:16',
|
||||
label: 'node:16'
|
||||
},
|
||||
{
|
||||
value: 'node:14',
|
||||
label: 'node:14'
|
||||
},
|
||||
{
|
||||
value: 'node:12',
|
||||
label: 'node:12'
|
||||
}
|
||||
];
|
||||
const staticVersions = [
|
||||
{
|
||||
value: 'webdevops/nginx:alpine',
|
||||
label: 'webdevops/nginx:alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/apache:alpine',
|
||||
label: 'webdevops/apache:alpine'
|
||||
},
|
||||
{
|
||||
value: 'nginx:alpine',
|
||||
label: 'nginx:alpine'
|
||||
},
|
||||
{
|
||||
value: 'httpd:alpine',
|
||||
label: 'httpd:alpine (Apache)'
|
||||
}
|
||||
];
|
||||
const rustVersions = [
|
||||
{
|
||||
value: 'rust:latest',
|
||||
label: 'rust:latest'
|
||||
},
|
||||
{
|
||||
value: 'rust:1.60',
|
||||
label: 'rust:1.60'
|
||||
},
|
||||
{
|
||||
value: 'rust:1.60-buster',
|
||||
label: 'rust:1.60-buster'
|
||||
},
|
||||
{
|
||||
value: 'rust:1.60-bullseye',
|
||||
label: 'rust:1.60-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'rust:1.60-slim-buster',
|
||||
label: 'rust:1.60-slim-buster'
|
||||
},
|
||||
{
|
||||
value: 'rust:1.60-slim-bullseye',
|
||||
label: 'rust:1.60-slim-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'rust:1.60-alpine3.14',
|
||||
label: 'rust:1.60-alpine3.14'
|
||||
},
|
||||
{
|
||||
value: 'rust:1.60-alpine3.15',
|
||||
label: 'rust:1.60-alpine3.15'
|
||||
}
|
||||
];
|
||||
const phpVersions = [
|
||||
{
|
||||
value: 'webdevops/php-apache:8.2',
|
||||
label: 'webdevops/php-apache:8.2'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:8.2',
|
||||
label: 'webdevops/php-nginx:8.2'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:8.1',
|
||||
label: 'webdevops/php-apache:8.1'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:8.1',
|
||||
label: 'webdevops/php-nginx:8.1'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:8.0',
|
||||
label: 'webdevops/php-apache:8.0'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:8.0',
|
||||
label: 'webdevops/php-nginx:8.0'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.4',
|
||||
label: 'webdevops/php-apache:7.4'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.4',
|
||||
label: 'webdevops/php-nginx:7.4'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.3',
|
||||
label: 'webdevops/php-apache:7.3'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.3',
|
||||
label: 'webdevops/php-nginx:7.3'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.2',
|
||||
label: 'webdevops/php-apache:7.2'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.2',
|
||||
label: 'webdevops/php-nginx:7.2'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.1',
|
||||
label: 'webdevops/php-apache:7.1'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.1',
|
||||
label: 'webdevops/php-nginx:7.1'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.0',
|
||||
label: 'webdevops/php-apache:7.0'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.0',
|
||||
label: 'webdevops/php-nginx:7.0'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:5.6',
|
||||
label: 'webdevops/php-apache:5.6'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:5.6',
|
||||
label: 'webdevops/php-nginx:5.6'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:8.2-alpine',
|
||||
label: 'webdevops/php-apache:8.2-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:8.2-alpine',
|
||||
label: 'webdevops/php-nginx:8.2-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:8.1-alpine',
|
||||
label: 'webdevops/php-apache:8.1-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:8.1-alpine',
|
||||
label: 'webdevops/php-nginx:8.1-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:8.0-alpine',
|
||||
label: 'webdevops/php-apache:8.0-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:8.0-alpine',
|
||||
label: 'webdevops/php-nginx:8.0-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.4-alpine',
|
||||
label: 'webdevops/php-apache:7.4-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.4-alpine',
|
||||
label: 'webdevops/php-nginx:7.4-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.3-alpine',
|
||||
label: 'webdevops/php-apache:7.3-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.3-alpine',
|
||||
label: 'webdevops/php-nginx:7.3-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.2-alpine',
|
||||
label: 'webdevops/php-apache:7.2-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-nginx:7.2-alpine',
|
||||
label: 'webdevops/php-nginx:7.2-alpine'
|
||||
},
|
||||
{
|
||||
value: 'webdevops/php-apache:7.1-alpine',
|
||||
label: 'webdevops/php-apache:7.1-alpine'
|
||||
},
|
||||
{
|
||||
value: 'php:8.1-fpm',
|
||||
label: 'php:8.1-fpm'
|
||||
},
|
||||
{
|
||||
value: 'php:8.0-fpm',
|
||||
label: 'php:8.0-fpm'
|
||||
},
|
||||
{
|
||||
value: 'php:8.1-fpm-alpine',
|
||||
label: 'php:8.1-fpm-alpine'
|
||||
},
|
||||
{
|
||||
value: 'php:8.0-fpm-alpine',
|
||||
label: 'php:8.0-fpm-alpine'
|
||||
}
|
||||
];
|
||||
const pythonVersions = [
|
||||
{
|
||||
value: 'python:3.10-alpine',
|
||||
label: 'python:3.10-alpine'
|
||||
},
|
||||
{
|
||||
value: 'python:3.10-buster',
|
||||
label: 'python:3.10-buster'
|
||||
},
|
||||
{
|
||||
value: 'python:3.10-bullseye',
|
||||
label: 'python:3.10-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'python:3.10-slim-bullseye',
|
||||
label: 'python:3.10-slim-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'python:3.9-alpine',
|
||||
label: 'python:3.9-alpine'
|
||||
},
|
||||
{
|
||||
value: 'python:3.9-buster',
|
||||
label: 'python:3.9-buster'
|
||||
},
|
||||
{
|
||||
value: 'python:3.9-bullseye',
|
||||
label: 'python:3.9-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'python:3.9-slim-bullseye',
|
||||
label: 'python:3.9-slim-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'python:3.8-alpine',
|
||||
label: 'python:3.8-alpine'
|
||||
},
|
||||
{
|
||||
value: 'python:3.8-buster',
|
||||
label: 'python:3.8-buster'
|
||||
},
|
||||
{
|
||||
value: 'python:3.8-bullseye',
|
||||
label: 'python:3.8-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'python:3.8-slim-bullseye',
|
||||
label: 'python:3.8-slim-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'python:3.7-alpine',
|
||||
label: 'python:3.7-alpine'
|
||||
},
|
||||
{
|
||||
value: 'python:3.7-buster',
|
||||
label: 'python:3.7-buster'
|
||||
},
|
||||
{
|
||||
value: 'python:3.7-bullseye',
|
||||
label: 'python:3.7-bullseye'
|
||||
},
|
||||
{
|
||||
value: 'python:3.7-slim-bullseye',
|
||||
label: 'python:3.7-slim-bullseye'
|
||||
}
|
||||
];
|
||||
const herokuVersions = [
|
||||
{
|
||||
value: 'heroku/builder:22',
|
||||
label: 'heroku/builder:22'
|
||||
},
|
||||
{
|
||||
value: 'heroku/buildpacks:20',
|
||||
label: 'heroku/buildpacks:20'
|
||||
},
|
||||
{
|
||||
value: 'heroku/builder-classic:22',
|
||||
label: 'heroku/builder-classic:22'
|
||||
}
|
||||
];
|
||||
let payload: any = {
|
||||
baseImage: null,
|
||||
baseBuildImage: null,
|
||||
baseImages: [],
|
||||
baseBuildImages: []
|
||||
};
|
||||
if (nodeBased.includes(buildPack)) {
|
||||
if (deploymentType === 'static') {
|
||||
payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
|
||||
payload.baseImages = isARM(process.arch)
|
||||
? staticVersions.filter((version) => !version.value.includes('webdevops'))
|
||||
: staticVersions;
|
||||
payload.baseBuildImage = 'node:lts';
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
} else {
|
||||
payload.baseImage = 'node:lts';
|
||||
payload.baseImages = nodeVersions;
|
||||
payload.baseBuildImage = 'node:lts';
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
}
|
||||
}
|
||||
if (staticApps.includes(buildPack)) {
|
||||
payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
|
||||
payload.baseImages = isARM(process.arch)
|
||||
? staticVersions.filter((version) => !version.value.includes('webdevops'))
|
||||
: staticVersions;
|
||||
payload.baseBuildImage = 'node:lts';
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
}
|
||||
if (buildPack === 'python') {
|
||||
payload.baseImage = 'python:3.10-alpine';
|
||||
payload.baseImages = pythonVersions;
|
||||
}
|
||||
if (buildPack === 'rust') {
|
||||
payload.baseImage = 'rust:latest';
|
||||
payload.baseBuildImage = 'rust:latest';
|
||||
payload.baseImages = rustVersions;
|
||||
payload.baseBuildImages = rustVersions;
|
||||
}
|
||||
if (buildPack === 'deno') {
|
||||
payload.baseImage = 'denoland/deno:latest';
|
||||
}
|
||||
if (buildPack === 'php') {
|
||||
payload.baseImage = isARM(process.arch)
|
||||
? 'php:8.1-fpm-alpine'
|
||||
: 'webdevops/php-apache:8.2-alpine';
|
||||
payload.baseImages = isARM(process.arch)
|
||||
? phpVersions.filter((version) => !version.value.includes('webdevops'))
|
||||
: phpVersions;
|
||||
}
|
||||
if (buildPack === 'laravel') {
|
||||
payload.baseImage = isARM(process.arch)
|
||||
? 'php:8.1-fpm-alpine'
|
||||
: 'webdevops/php-apache:8.2-alpine';
|
||||
payload.baseImages = isARM(process.arch)
|
||||
? phpVersions.filter((version) => !version.value.includes('webdevops'))
|
||||
: phpVersions;
|
||||
payload.baseBuildImage = 'node:18';
|
||||
payload.baseBuildImages = nodeVersions;
|
||||
}
|
||||
if (buildPack === 'heroku') {
|
||||
payload.baseImage = 'heroku/buildpacks:20';
|
||||
payload.baseImages = herokuVersions;
|
||||
}
|
||||
return payload;
|
||||
}
|
||||
|
||||
export const setDefaultConfiguration = async (data: any) => {
|
||||
let {
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
startCommand,
|
||||
buildCommand,
|
||||
publishDirectory,
|
||||
baseDirectory,
|
||||
dockerFileLocation,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile
|
||||
} = data;
|
||||
//@ts-ignore
|
||||
const template = scanningTemplates[buildPack];
|
||||
if (!port) {
|
||||
port = template?.port || 3000;
|
||||
|
||||
if (buildPack === 'static') port = 80;
|
||||
else if (buildPack === 'node') port = 3000;
|
||||
else if (buildPack === 'php') port = 80;
|
||||
else if (buildPack === 'python') port = 8000;
|
||||
}
|
||||
if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||
installCommand = template?.installCommand || 'yarn install';
|
||||
if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||
startCommand = template?.startCommand || 'yarn start';
|
||||
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||
buildCommand = template?.buildCommand || null;
|
||||
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
|
||||
if (baseDirectory) {
|
||||
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
|
||||
if (baseDirectory.endsWith('/') && baseDirectory !== '/')
|
||||
baseDirectory = baseDirectory.slice(0, -1);
|
||||
}
|
||||
if (dockerFileLocation) {
|
||||
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
|
||||
if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
|
||||
} else {
|
||||
dockerFileLocation = '/Dockerfile';
|
||||
}
|
||||
if (dockerComposeFileLocation) {
|
||||
if (!dockerComposeFileLocation.startsWith('/'))
|
||||
dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
|
||||
if (dockerComposeFileLocation.endsWith('/'))
|
||||
dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
|
||||
} else {
|
||||
dockerComposeFileLocation = '/Dockerfile';
|
||||
}
|
||||
if (!denoMainFile) {
|
||||
denoMainFile = 'main.ts';
|
||||
}
|
||||
|
||||
return {
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
startCommand,
|
||||
buildCommand,
|
||||
publishDirectory,
|
||||
baseDirectory,
|
||||
dockerFileLocation,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile
|
||||
};
|
||||
};
|
||||
|
||||
export const scanningTemplates = {
|
||||
'@sveltejs/kit': {
|
||||
buildPack: 'nodejs'
|
||||
},
|
||||
astro: {
|
||||
buildPack: 'astro'
|
||||
},
|
||||
'@11ty/eleventy': {
|
||||
buildPack: 'eleventy'
|
||||
},
|
||||
svelte: {
|
||||
buildPack: 'svelte'
|
||||
},
|
||||
'@nestjs/core': {
|
||||
buildPack: 'nestjs'
|
||||
},
|
||||
next: {
|
||||
buildPack: 'nextjs'
|
||||
},
|
||||
nuxt: {
|
||||
buildPack: 'nuxtjs'
|
||||
},
|
||||
'react-scripts': {
|
||||
buildPack: 'react'
|
||||
},
|
||||
'parcel-bundler': {
|
||||
buildPack: 'static'
|
||||
},
|
||||
'@vue/cli-service': {
|
||||
buildPack: 'vuejs'
|
||||
},
|
||||
vuejs: {
|
||||
buildPack: 'vuejs'
|
||||
},
|
||||
gatsby: {
|
||||
buildPack: 'gatsby'
|
||||
},
|
||||
'preact-cli': {
|
||||
buildPack: 'react'
|
||||
}
|
||||
};
|
||||
|
||||
export const saveBuildLog = async ({
|
||||
line,
|
||||
buildId,
|
||||
applicationId
|
||||
}: {
|
||||
line: string;
|
||||
buildId: string;
|
||||
applicationId: string;
|
||||
}): Promise<any> => {
|
||||
if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
|
||||
if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
|
||||
const { default: got } = await import('got');
|
||||
if (typeof line === 'object' && line) {
|
||||
if (line.shortMessage) {
|
||||
line = line.shortMessage + '\n' + line.stderr;
|
||||
} else {
|
||||
line = JSON.stringify(line);
|
||||
}
|
||||
}
|
||||
if (line && typeof line === 'string' && line.includes('ghs_')) {
|
||||
const regex = /ghs_.*@/g;
|
||||
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
|
||||
}
|
||||
const addTimestamp = `[${generateTimestamp()}] ${line}`;
|
||||
const fluentBitUrl = isDev
|
||||
? process.env.COOLIFY_CONTAINER_DEV === 'true'
|
||||
? 'http://coolify-fluentbit:24224'
|
||||
: 'http://localhost:24224'
|
||||
: 'http://coolify-fluentbit:24224';
|
||||
|
||||
if (isDev && !process.env.COOLIFY_CONTAINER_DEV) {
|
||||
console.debug(`[${applicationId}] ${addTimestamp}`);
|
||||
}
|
||||
try {
|
||||
return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
|
||||
json: {
|
||||
line: encrypt(line)
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
return await prisma.buildLog.create({
|
||||
data: {
|
||||
line: addTimestamp,
|
||||
buildId,
|
||||
time: Number(day().valueOf()),
|
||||
applicationId
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export async function copyBaseConfigurationFiles(
|
||||
buildPack,
|
||||
workdir,
|
||||
buildId,
|
||||
applicationId,
|
||||
baseImage
|
||||
) {
|
||||
try {
|
||||
if (buildPack === 'php') {
|
||||
await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
|
||||
await saveBuildLog({
|
||||
line: 'Copied default configuration file for PHP.',
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
} else if (baseImage?.includes('nginx')) {
|
||||
await fs.writeFile(
|
||||
`${workdir}/nginx.conf`,
|
||||
`user nginx;
|
||||
worker_processes auto;
|
||||
|
||||
error_log /docker.stdout;
|
||||
pid /run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
|
||||
access_log /docker.stdout main;
|
||||
|
||||
sendfile on;
|
||||
tcp_nopush on;
|
||||
tcp_nodelay on;
|
||||
keepalive_timeout 65;
|
||||
types_hash_max_size 2048;
|
||||
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name localhost;
|
||||
|
||||
location / {
|
||||
root /app;
|
||||
index index.html;
|
||||
try_files $uri $uri/index.html $uri/ /index.html =404;
|
||||
}
|
||||
|
||||
error_page 404 /50x.html;
|
||||
|
||||
# redirect server error pages to the static page /50x.html
|
||||
#
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /50x.html {
|
||||
root /app;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
`
|
||||
);
|
||||
}
|
||||
// TODO: Add more configuration files for other buildpacks, like apache2, etc.
|
||||
} catch (error) {
|
||||
throw new Error(error);
|
||||
}
|
||||
}
|
||||
|
||||
export function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
|
||||
return (
|
||||
installCommand?.includes('pnpm') ||
|
||||
buildCommand?.includes('pnpm') ||
|
||||
startCommand?.includes('pnpm')
|
||||
);
|
||||
}
|
||||
|
||||
export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
|
||||
if (!username || !password) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let decryptedPassword = decrypt(password);
|
||||
const location = `${workdir}/.docker`;
|
||||
|
||||
try {
|
||||
await fs.mkdir(`${workdir}/.docker`);
|
||||
} catch (error) {
|
||||
// console.log(error);
|
||||
}
|
||||
const payload = JSON.stringify({
|
||||
auths: {
|
||||
[url]: {
|
||||
auth: Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
|
||||
}
|
||||
}
|
||||
});
|
||||
await fs.writeFile(`${location}/config.json`, payload);
|
||||
return location;
|
||||
}
|
||||
export async function buildImage({
|
||||
applicationId,
|
||||
tag,
|
||||
workdir,
|
||||
buildId,
|
||||
dockerId,
|
||||
isCache = false,
|
||||
debug = false,
|
||||
dockerFileLocation = '/Dockerfile',
|
||||
commit,
|
||||
forceRebuild = false
|
||||
}) {
|
||||
if (isCache) {
|
||||
await saveBuildLog({ line: `Building cache image...`, buildId, applicationId });
|
||||
} else {
|
||||
await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
|
||||
}
|
||||
const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`;
|
||||
const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`;
|
||||
let location = null;
|
||||
|
||||
const { dockerRegistry } = await prisma.application.findUnique({
|
||||
where: { id: applicationId },
|
||||
select: { dockerRegistry: true }
|
||||
});
|
||||
if (dockerRegistry) {
|
||||
const { url, username, password } = dockerRegistry;
|
||||
location = await saveDockerRegistryCredentials({ url, username, password, workdir });
|
||||
}
|
||||
|
||||
await executeCommand({
|
||||
stream: true,
|
||||
debug,
|
||||
buildId,
|
||||
applicationId,
|
||||
dockerId,
|
||||
command: `docker ${location ? `--config ${location}` : ''} build ${
|
||||
forceRebuild ? '--no-cache' : ''
|
||||
} --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
|
||||
});
|
||||
|
||||
const { status } = await prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (status === 'canceled') {
|
||||
throw new Error('Canceled.');
|
||||
}
|
||||
}
|
||||
export function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
|
||||
return [
|
||||
'coolify.managed=true',
|
||||
`coolify.version=${version}`,
|
||||
`coolify.applicationId=${applicationId}`,
|
||||
`coolify.type=standalone-application`
|
||||
];
|
||||
}
|
||||
export function makeLabelForStandaloneApplication({
|
||||
applicationId,
|
||||
fqdn,
|
||||
name,
|
||||
type,
|
||||
pullmergeRequestId = null,
|
||||
buildPack,
|
||||
repository,
|
||||
branch,
|
||||
projectId,
|
||||
port,
|
||||
commit,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
publishDirectory
|
||||
}) {
|
||||
if (pullmergeRequestId) {
|
||||
const protocol = fqdn.startsWith('https://') ? 'https' : 'http';
|
||||
const domain = getDomain(fqdn);
|
||||
fqdn = `${protocol}://${pullmergeRequestId}.${domain}`;
|
||||
}
|
||||
return [
|
||||
'coolify.managed=true',
|
||||
`coolify.version=${version}`,
|
||||
`coolify.applicationId=${applicationId}`,
|
||||
`coolify.type=standalone-application`,
|
||||
`coolify.name=${name}`,
|
||||
`coolify.configuration=${base64Encode(
|
||||
JSON.stringify({
|
||||
applicationId,
|
||||
fqdn,
|
||||
name,
|
||||
type,
|
||||
pullmergeRequestId,
|
||||
buildPack,
|
||||
repository,
|
||||
branch,
|
||||
projectId,
|
||||
port,
|
||||
commit,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
publishDirectory
|
||||
})
|
||||
)}`
|
||||
];
|
||||
}
|
||||
|
||||
export async function buildCacheImageWithNode(data, imageForBuild) {
|
||||
const {
|
||||
workdir,
|
||||
buildId,
|
||||
baseDirectory,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
secrets,
|
||||
pullmergeRequestId
|
||||
} = data;
|
||||
const isPnpm = checkPnpm(installCommand, buildCommand);
|
||||
const Dockerfile: Array<string> = [];
|
||||
Dockerfile.push(`FROM ${imageForBuild}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (isPnpm) {
|
||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||
}
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
if (installCommand) {
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
}
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
|
||||
await buildImage({ ...data, isCache: true });
|
||||
}
|
||||
|
||||
export async function buildCacheImageForLaravel(data, imageForBuild) {
|
||||
const { workdir, buildId, secrets, pullmergeRequestId } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
Dockerfile.push(`FROM ${imageForBuild}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
Dockerfile.push(`COPY *.json *.mix.js /app/`);
|
||||
Dockerfile.push(`COPY resources /app/resources`);
|
||||
Dockerfile.push(`RUN yarn install && yarn production`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
|
||||
await buildImage({ ...data, isCache: true });
|
||||
}
|
||||
|
||||
export async function buildCacheImageWithCargo(data, imageForBuild) {
|
||||
const { applicationId, workdir, buildId } = data;
|
||||
|
||||
const Dockerfile: Array<string> = [];
|
||||
Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push('RUN cargo install cargo-chef');
|
||||
Dockerfile.push('COPY . .');
|
||||
Dockerfile.push('RUN cargo chef prepare --recipe-path recipe.json');
|
||||
Dockerfile.push(`FROM ${imageForBuild}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push('RUN cargo install cargo-chef');
|
||||
Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
|
||||
Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
|
||||
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
|
||||
await buildImage({ ...data, isCache: true });
|
||||
}
|
||||
111
apps/server/src/lib/buildPacks/compose.ts
Normal file
111
apps/server/src/lib/buildPacks/compose.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { saveBuildLog } from './common';
|
||||
import yaml from 'js-yaml';
|
||||
import { defaultComposeConfiguration } from '../docker';
|
||||
import { executeCommand } from '../executeCommand';
|
||||
|
||||
export default async function (data) {
|
||||
let {
|
||||
applicationId,
|
||||
debug,
|
||||
buildId,
|
||||
dockerId,
|
||||
network,
|
||||
volumes,
|
||||
labels,
|
||||
workdir,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
dockerComposeConfiguration,
|
||||
dockerComposeFileLocation
|
||||
} = data;
|
||||
const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
|
||||
const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
|
||||
const dockerComposeYaml = yaml.load(dockerComposeRaw);
|
||||
if (!dockerComposeYaml.services) {
|
||||
throw 'No Services found in docker-compose file.';
|
||||
}
|
||||
let envs = [];
|
||||
if (secrets.length > 0) {
|
||||
envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)];
|
||||
}
|
||||
|
||||
const composeVolumes = [];
|
||||
if (volumes.length > 0) {
|
||||
for (const volume of volumes) {
|
||||
let [v, path] = volume.split(':');
|
||||
composeVolumes[v] = {
|
||||
name: v
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
let networks = {};
|
||||
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
|
||||
value['container_name'] = `${applicationId}-${key}`;
|
||||
let environment = typeof value['environment'] === 'undefined' ? [] : value['environment']
|
||||
value['environment'] = [...environment, ...envs];
|
||||
value['labels'] = labels;
|
||||
// TODO: If we support separated volume for each service, we need to add it here
|
||||
if (value['volumes']?.length > 0) {
|
||||
value['volumes'] = value['volumes'].map((volume) => {
|
||||
let [v, path, permission] = volume.split(':');
|
||||
if (!path) {
|
||||
path = v;
|
||||
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
|
||||
} else {
|
||||
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
|
||||
}
|
||||
composeVolumes[v] = {
|
||||
name: v
|
||||
};
|
||||
return `${v}:${path}${permission ? ':' + permission : ''}`;
|
||||
});
|
||||
}
|
||||
if (volumes.length > 0) {
|
||||
for (const volume of volumes) {
|
||||
value['volumes'].push(volume);
|
||||
}
|
||||
}
|
||||
if (dockerComposeConfiguration[key].port) {
|
||||
value['expose'] = [dockerComposeConfiguration[key].port];
|
||||
}
|
||||
if (value['networks']?.length > 0) {
|
||||
value['networks'].forEach((network) => {
|
||||
networks[network] = {
|
||||
name: network
|
||||
};
|
||||
});
|
||||
}
|
||||
value['networks'] = [...(value['networks'] || ''), network];
|
||||
dockerComposeYaml.services[key] = {
|
||||
...dockerComposeYaml.services[key],
|
||||
restart: defaultComposeConfiguration(network).restart,
|
||||
deploy: defaultComposeConfiguration(network).deploy
|
||||
};
|
||||
}
|
||||
if (Object.keys(composeVolumes).length > 0) {
|
||||
dockerComposeYaml['volumes'] = { ...composeVolumes };
|
||||
}
|
||||
dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } });
|
||||
|
||||
await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
|
||||
await executeCommand({
|
||||
debug,
|
||||
buildId,
|
||||
applicationId,
|
||||
dockerId,
|
||||
command: `docker compose --project-directory ${workdir} pull`
|
||||
});
|
||||
await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
|
||||
await executeCommand({
|
||||
debug,
|
||||
buildId,
|
||||
applicationId,
|
||||
dockerId,
|
||||
command: `docker compose --project-directory ${workdir} build --progress plain`
|
||||
});
|
||||
await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
|
||||
}
|
||||
52
apps/server/src/lib/buildPacks/deno.ts
Normal file
52
apps/server/src/lib/buildPacks/deno.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const {
|
||||
workdir,
|
||||
port,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
denoMainFile,
|
||||
denoOptions,
|
||||
buildId
|
||||
} = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
|
||||
let depsFound = false;
|
||||
try {
|
||||
await fs.readFile(`${workdir}${baseDirectory || ''}/deps.ts`);
|
||||
depsFound = true;
|
||||
} catch (error) {}
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (depsFound) {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''}/deps.ts /app`);
|
||||
Dockerfile.push(`RUN deno cache deps.ts`);
|
||||
}
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
|
||||
Dockerfile.push(`ENV NO_COLOR true`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
27
apps/server/src/lib/buildPacks/docker.ts
Normal file
27
apps/server/src/lib/buildPacks/docker.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildImage } from './common';
|
||||
|
||||
export default async function (data) {
|
||||
let { workdir, buildId, baseDirectory, secrets, pullmergeRequestId, dockerFileLocation } = data;
|
||||
const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
|
||||
data.workdir = `${workdir}${baseDirectory}`;
|
||||
const DockerfileRaw = await fs.readFile(`${file}`, 'utf8');
|
||||
const Dockerfile: Array<string> = DockerfileRaw.toString().trim().split('\n');
|
||||
Dockerfile.forEach((line, index) => {
|
||||
if (line.startsWith('FROM')) {
|
||||
Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
|
||||
}
|
||||
});
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.forEach((line, index) => {
|
||||
if (line.startsWith('FROM')) {
|
||||
Dockerfile.splice(index + 1, 0, env);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
await fs.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
|
||||
await buildImage(data);
|
||||
}
|
||||
28
apps/server/src/lib/buildPacks/gatsby.ts
Normal file
28
apps/server/src/lib/buildPacks/gatsby.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { buildCacheImageWithNode, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, imageforBuild): Promise<void> => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
|
||||
Dockerfile.push(`FROM ${imageforBuild}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await buildCacheImageWithNode(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
17
apps/server/src/lib/buildPacks/heroku.ts
Normal file
17
apps/server/src/lib/buildPacks/heroku.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { executeCommand } from "../executeCommand";
|
||||
import { saveBuildLog } from "./common";
|
||||
|
||||
export default async function (data: any): Promise<void> {
|
||||
const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
|
||||
try {
|
||||
await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
|
||||
await executeCommand({
|
||||
buildId,
|
||||
debug,
|
||||
dockerId,
|
||||
command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
|
||||
})
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
41
apps/server/src/lib/buildPacks/index.ts
Normal file
41
apps/server/src/lib/buildPacks/index.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import node from './node';
|
||||
import staticApp from './static';
|
||||
import docker from './docker';
|
||||
import gatsby from './gatsby';
|
||||
import svelte from './svelte';
|
||||
import react from './react';
|
||||
import nestjs from './nestjs';
|
||||
import nextjs from './nextjs';
|
||||
import nuxtjs from './nuxtjs';
|
||||
import vuejs from './vuejs';
|
||||
import php from './php';
|
||||
import rust from './rust';
|
||||
import astro from './static';
|
||||
import eleventy from './static';
|
||||
import python from './python';
|
||||
import deno from './deno';
|
||||
import laravel from './laravel';
|
||||
import heroku from './heroku';
|
||||
import compose from './compose';
|
||||
|
||||
export {
|
||||
node,
|
||||
staticApp,
|
||||
docker,
|
||||
gatsby,
|
||||
svelte,
|
||||
react,
|
||||
nestjs,
|
||||
nextjs,
|
||||
nuxtjs,
|
||||
vuejs,
|
||||
php,
|
||||
rust,
|
||||
astro,
|
||||
eleventy,
|
||||
python,
|
||||
deno,
|
||||
laravel,
|
||||
heroku,
|
||||
compose
|
||||
};
|
||||
46
apps/server/src/lib/buildPacks/laravel.ts
Normal file
46
apps/server/src/lib/buildPacks/laravel.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildCacheImageForLaravel, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const { workdir, applicationId, tag, buildId, port, secrets, pullmergeRequestId } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
|
||||
Dockerfile.push(`COPY --chown=application:application composer.* ./`);
|
||||
Dockerfile.push(`COPY --chown=application:application database/ database/`);
|
||||
Dockerfile.push(
|
||||
`RUN composer install --ignore-platform-reqs --no-interaction --no-plugins --no-scripts --prefer-dist`
|
||||
);
|
||||
Dockerfile.push(
|
||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/js/ /app/public/js/`
|
||||
);
|
||||
Dockerfile.push(
|
||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/css/ /app/public/css/`
|
||||
);
|
||||
Dockerfile.push(
|
||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
|
||||
);
|
||||
Dockerfile.push(`COPY --chown=application:application . ./`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
try {
|
||||
await buildCacheImageForLaravel(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
31
apps/server/src/lib/buildPacks/nestjs.ts
Normal file
31
apps/server/src/lib/buildPacks/nestjs.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { buildCacheImageWithNode, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
const isPnpm = startCommand.includes('pnpm');
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (isPnpm) {
|
||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);
|
||||
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await buildCacheImageWithNode(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
66
apps/server/src/lib/buildPacks/nextjs.ts
Normal file
66
apps/server/src/lib/buildPacks/nextjs.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const {
|
||||
applicationId,
|
||||
buildId,
|
||||
tag,
|
||||
workdir,
|
||||
publishDirectory,
|
||||
port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
deploymentType,
|
||||
baseImage
|
||||
} = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (isPnpm) {
|
||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||
}
|
||||
if (deploymentType === 'node') {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
} else if (deploymentType === 'static') {
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
Dockerfile.push(`EXPOSE 80`);
|
||||
}
|
||||
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
|
||||
if (deploymentType === 'node') {
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} else if (deploymentType === 'static') {
|
||||
if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
49
apps/server/src/lib/buildPacks/node.ts
Normal file
49
apps/server/src/lib/buildPacks/node.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildImage, checkPnpm } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const {
|
||||
workdir,
|
||||
port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
buildId
|
||||
} = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (isPnpm) {
|
||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||
}
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
if (buildCommand) {
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage } = data;
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
66
apps/server/src/lib/buildPacks/nuxtjs.ts
Normal file
66
apps/server/src/lib/buildPacks/nuxtjs.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const {
|
||||
applicationId,
|
||||
buildId,
|
||||
tag,
|
||||
workdir,
|
||||
publishDirectory,
|
||||
port,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
deploymentType,
|
||||
baseImage
|
||||
} = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (isPnpm) {
|
||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||
}
|
||||
if (deploymentType === 'node') {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
} else if (deploymentType === 'static') {
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
Dockerfile.push(`EXPOSE 80`);
|
||||
}
|
||||
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
|
||||
if (deploymentType === 'node') {
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} else if (deploymentType === 'static') {
|
||||
if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
50
apps/server/src/lib/buildPacks/php.ts
Normal file
50
apps/server/src/lib/buildPacks/php.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
|
||||
const { workdir, baseDirectory, buildId, port, secrets, pullmergeRequestId } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
let composerFound = false;
|
||||
try {
|
||||
await fs.readFile(`${workdir}${baseDirectory || ''}/composer.json`);
|
||||
composerFound = true;
|
||||
} catch (error) {}
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} /app`);
|
||||
if (htaccessFound) {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''}/.htaccess ./`);
|
||||
}
|
||||
if (composerFound) {
|
||||
Dockerfile.push(`RUN composer install`);
|
||||
}
|
||||
|
||||
Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
const { workdir, baseDirectory, baseImage } = data;
|
||||
try {
|
||||
let htaccessFound = false;
|
||||
try {
|
||||
await fs.readFile(`${workdir}${baseDirectory || ''}/.htaccess`);
|
||||
htaccessFound = true;
|
||||
} catch (e) {
|
||||
//
|
||||
}
|
||||
await createDockerfile(data, baseImage, htaccessFound);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
67
apps/server/src/lib/buildPacks/python.ts
Normal file
67
apps/server/src/lib/buildPacks/python.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const {
|
||||
workdir,
|
||||
port,
|
||||
baseDirectory,
|
||||
secrets,
|
||||
pullmergeRequestId,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
buildId
|
||||
} = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
|
||||
Dockerfile.push(`RUN pip install gunicorn`);
|
||||
} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
|
||||
Dockerfile.push(`RUN pip install uvicorn`);
|
||||
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
|
||||
Dockerfile.push(`RUN apk add --no-cache uwsgi-python3`);
|
||||
// Dockerfile.push(`RUN pip install --no-cache-dir uwsgi`)
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.stat(`${workdir}${baseDirectory || ''}/requirements.txt`);
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''}/requirements.txt ./`);
|
||||
Dockerfile.push(`RUN pip install --no-cache-dir -r .${baseDirectory || ''}/requirements.txt`);
|
||||
} catch (e) {
|
||||
//
|
||||
}
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
|
||||
Dockerfile.push(`CMD gunicorn -w=4 -b=0.0.0.0:8000 ${pythonModule}:${pythonVariable}`);
|
||||
} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
|
||||
Dockerfile.push(`CMD uvicorn ${pythonModule}:${pythonVariable} --port ${port} --host 0.0.0.0`);
|
||||
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
|
||||
Dockerfile.push(
|
||||
`CMD uwsgi --master -p 4 --http-socket 0.0.0.0:8000 --uid uwsgi --plugins python3 --protocol uwsgi --wsgi ${pythonModule}:${pythonVariable}`
|
||||
);
|
||||
} else {
|
||||
Dockerfile.push(`CMD python ${pythonModule}`);
|
||||
}
|
||||
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
28
apps/server/src/lib/buildPacks/react.ts
Normal file
28
apps/server/src/lib/buildPacks/react.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { buildCacheImageWithNode, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await buildCacheImageWithNode(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
40
apps/server/src/lib/buildPacks/rust.ts
Normal file
40
apps/server/src/lib/buildPacks/rust.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import TOML from '@iarna/toml';
|
||||
import { buildCacheImageWithCargo, buildImage } from './common';
|
||||
import { executeCommand } from '../executeCommand';
|
||||
|
||||
const createDockerfile = async (data, image, name): Promise<void> => {
|
||||
const { workdir, port, applicationId, tag, buildId } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target target`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /usr/local/cargo /usr/local/cargo`);
|
||||
Dockerfile.push(`COPY . .`);
|
||||
Dockerfile.push(`RUN cargo build --release --bin ${name}`);
|
||||
Dockerfile.push('FROM debian:buster-slim');
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(
|
||||
`RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*`
|
||||
);
|
||||
Dockerfile.push(`RUN update-ca-certificates`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target/release/${name} ${name}`);
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ["/app/${name}"]`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { workdir, baseImage, baseBuildImage } = data;
|
||||
const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
|
||||
const parsedToml: any = TOML.parse(cargoToml);
|
||||
const name = parsedToml.package.name;
|
||||
await buildCacheImageWithCargo(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage, name);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
54
apps/server/src/lib/buildPacks/static.ts
Normal file
54
apps/server/src/lib/buildPacks/static.ts
Normal file
@@ -0,0 +1,54 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const {
		applicationId,
		tag,
		workdir,
		buildCommand,
		baseDirectory,
		publishDirectory,
		secrets,
		pullmergeRequestId,
		baseImage,
		buildId,
		port
	} = data;
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	if (baseImage?.includes('httpd')) {
		Dockerfile.push('WORKDIR /usr/local/apache2/htdocs/');
	} else {
		Dockerfile.push('WORKDIR /app');
	}
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	if (secrets.length > 0) {
		generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
			Dockerfile.push(env);
		});
	}
	if (buildCommand) {
		Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
	} else {
		Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
	}
	if (baseImage?.includes('nginx')) {
		Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		if (data.buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
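A rough sketch of the Dockerfile this static build pack writes for a hypothetical nginx-based site with a buildCommand and a publishDirectory of 'dist' (base image, ids and port are illustrative only, not taken from the diff):

	// Sketch only; real values come from the application record.
	const exampleOutput = [
		'FROM nginx:stable-alpine',
		'WORKDIR /app',
		'LABEL coolify.buildId=<buildId>',
		'COPY --from=<applicationId>:<tag>-cache /app/dist ./', // with a buildCommand, artifacts come from the cache image
		'COPY /nginx.conf /etc/nginx/nginx.conf',
		'EXPOSE 80'
	].join('\n');
	// Without a buildCommand the repository contents are copied directly instead: COPY . ./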
28
apps/server/src/lib/buildPacks/svelte.ts
Normal file
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
	if (baseImage?.includes('nginx')) {
		Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		await buildCacheImageWithNode(data, baseBuildImage);
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
28
apps/server/src/lib/buildPacks/vuejs.ts
Normal file
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
	if (baseImage?.includes('nginx')) {
		Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		await buildCacheImageWithNode(data, baseBuildImage);
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
@@ -535,3 +535,134 @@ export async function cleanupDB(buildId: string, applicationId: string) {
	}
	await saveBuildLog({ line: 'Canceled.', buildId, applicationId });
}

export const base64Encode = (text: string): string => {
	return Buffer.from(text).toString('base64');
};
export const base64Decode = (text: string): string => {
	return Buffer.from(text, 'base64').toString('ascii');
};
function parseSecret(secret, isBuild) {
	if (secret.value.includes('$')) {
		secret.value = secret.value.replaceAll('$', '$$$$');
	}
	if (secret.value.includes('\\n')) {
		if (isBuild) {
			return `ARG ${secret.name}=${secret.value}`;
		} else {
			return `${secret.name}=${secret.value}`;
		}
	} else if (secret.value.includes(' ')) {
		if (isBuild) {
			return `ARG ${secret.name}='${secret.value}'`;
		} else {
			return `${secret.name}='${secret.value}'`;
		}
	} else {
		if (isBuild) {
			return `ARG ${secret.name}=${secret.value}`;
		} else {
			return `${secret.name}=${secret.value}`;
		}
	}
}
export function generateSecrets(
	secrets: Array<any>,
	pullmergeRequestId: string,
	isBuild = false,
	port = null
): Array<string> {
	const envs = [];
	const isPRMRSecret = secrets.filter((s) => s.isPRMRSecret);
	const normalSecrets = secrets.filter((s) => !s.isPRMRSecret);
	if (pullmergeRequestId && isPRMRSecret.length > 0) {
		isPRMRSecret.forEach((secret) => {
			if (isBuild && !secret.isBuildSecret) {
				return;
			}
			const build = isBuild && secret.isBuildSecret;
			envs.push(parseSecret(secret, build));
		});
	}
	if (!pullmergeRequestId && normalSecrets.length > 0) {
		normalSecrets.forEach((secret) => {
			if (isBuild && !secret.isBuildSecret) {
				return;
			}
			const build = isBuild && secret.isBuildSecret;
			envs.push(parseSecret(secret, build));
		});
	}
	const portFound = envs.filter((env) => env.startsWith('PORT'));
	if (portFound.length === 0 && port && !isBuild) {
		envs.push(`PORT=${port}`);
	}
	const nodeEnv = envs.filter((env) => env.startsWith('NODE_ENV'));
	if (nodeEnv.length === 0 && !isBuild) {
		envs.push(`NODE_ENV=production`);
	}
	return envs;
}
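A minimal usage sketch of generateSecrets with hypothetical secrets (not from this commit), showing how the isBuild flag switches between runtime env lines and Dockerfile ARG lines, and how PORT/NODE_ENV are only appended for runtime use:

	// Sketch only; the objects mirror the secret shape used above.
	const exampleSecrets = [
		{ name: 'API_KEY', value: 'abc 123', isBuildSecret: false, isPRMRSecret: false },
		{ name: 'NPM_TOKEN', value: 'tok', isBuildSecret: true, isPRMRSecret: false }
	];
	// Runtime call, no PR/MR context (pullmergeRequestId passed as null):
	generateSecrets(exampleSecrets, null, false, 3000);
	// => ["API_KEY='abc 123'", 'NPM_TOKEN=tok', 'PORT=3000', 'NODE_ENV=production']
	// Build-time call: non-build secrets are skipped and ARG lines are emitted:
	generateSecrets(exampleSecrets, null, true);
	// => ['ARG NPM_TOKEN=tok']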
export function decryptApplication(application: any) {
	if (application) {
		if (application?.gitSource?.githubApp?.clientSecret) {
			application.gitSource.githubApp.clientSecret =
				decrypt(application.gitSource.githubApp.clientSecret) || null;
		}
		if (application?.gitSource?.githubApp?.webhookSecret) {
			application.gitSource.githubApp.webhookSecret =
				decrypt(application.gitSource.githubApp.webhookSecret) || null;
		}
		if (application?.gitSource?.githubApp?.privateKey) {
			application.gitSource.githubApp.privateKey =
				decrypt(application.gitSource.githubApp.privateKey) || null;
		}
		if (application?.gitSource?.gitlabApp?.appSecret) {
			application.gitSource.gitlabApp.appSecret =
				decrypt(application.gitSource.gitlabApp.appSecret) || null;
		}
		if (application?.secrets.length > 0) {
			application.secrets = application.secrets.map((s: any) => {
				s.value = decrypt(s.value) || null;
				return s;
			});
		}

		return application;
	}
}
export async function pushToRegistry(
	application: any,
	workdir: string,
	tag: string,
	imageName: string,
	customTag: string
) {
	const location = `${workdir}/.docker`;
	const tagCommand = `docker tag ${application.id}:${tag} ${imageName}:${customTag}`;
	const pushCommand = `docker --config ${location} push ${imageName}:${customTag}`;
	await executeCommand({
		dockerId: application.destinationDockerId,
		command: tagCommand
	});
	await executeCommand({
		dockerId: application.destinationDockerId,
		command: pushCommand
	});
}

export async function getContainerUsage(dockerId: string, container: string): Promise<any> {
	try {
		const { stdout } = await executeCommand({
			dockerId,
			command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"`
		});
		return JSON.parse(stdout);
	} catch (err) {
		return {
			MemUsage: 0,
			CPUPerc: 0,
			NetIO: 0
		};
	}
}
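Usage sketch for getContainerUsage — on success it returns the parsed `docker stats` JSON as-is, so fields keep Docker's own names; on failure it falls back to the zeroed object above (values below are hypothetical):

	// Assumes dockerId and containerId are in scope.
	const usage = await getContainerUsage(dockerId, containerId);
	// e.g. { CPUPerc: '0.07%', MemUsage: '21.4MiB / 7.6GiB', NetIO: '1.2kB / 648B', ... }
	console.log(usage.MemUsage, usage.CPUPerc, usage.NetIO);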
96
apps/server/src/lib/importers/github.ts
Normal file
@@ -0,0 +1,96 @@
import jsonwebtoken from 'jsonwebtoken';
import { prisma } from '../../prisma';
import { saveBuildLog } from '../buildPacks/common';
import { decrypt } from '../common';
import { executeCommand } from '../executeCommand';

export default async function ({
	applicationId,
	workdir,
	githubAppId,
	repository,
	apiUrl,
	gitCommitHash,
	htmlUrl,
	branch,
	buildId,
	customPort,
	forPublic
}: {
	applicationId: string;
	workdir: string;
	githubAppId: string;
	repository: string;
	apiUrl: string;
	gitCommitHash?: string;
	htmlUrl: string;
	branch: string;
	buildId: string;
	customPort: number;
	forPublic?: boolean;
}): Promise<string> {
	const { default: got } = await import('got')
	const url = htmlUrl.replace('https://', '').replace('http://', '');
	if (forPublic) {
		await saveBuildLog({
			line: `Cloning ${repository}:${branch}...`,
			buildId,
			applicationId
		});
		if (gitCommitHash) {
			await saveBuildLog({
				line: `Checking out ${gitCommitHash} commit...`,
				buildId,
				applicationId
			});
		}
		await executeCommand({
			command: `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
			shell: true
		});
	} else {
		const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
		if (body.privateKey) body.privateKey = decrypt(body.privateKey);
		const { privateKey, appId, installationId } = body
		const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');

		const payload = {
			iat: Math.round(new Date().getTime() / 1000),
			exp: Math.round(new Date().getTime() / 1000 + 60),
			iss: appId
		};
		const jwtToken = jsonwebtoken.sign(payload, githubPrivateKey, {
			algorithm: 'RS256'
		});
		const { token } = await got
			.post(`${apiUrl}/app/installations/${installationId}/access_tokens`, {
				headers: {
					Authorization: `Bearer ${jwtToken}`,
					Accept: 'application/vnd.github.machine-man-preview+json'
				}
			})
			.json();
		await saveBuildLog({
			line: `Cloning ${repository}:${branch}...`,
			buildId,
			applicationId
		});
		if (gitCommitHash) {
			await saveBuildLog({
				line: `Checking out ${gitCommitHash} commit...`,
				buildId,
				applicationId
			});
		}
		await executeCommand({
			command: `git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
			shell: true
		});
	}
	const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
	return commit.replace('\n', '');
}
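A hypothetical invocation of this importer from the build worker (all values illustrative; the `importers` namespace matches the deploy job's import). It resolves to the commit SHA that was checked out:

	const commit = await importers.github({
		applicationId: 'app-cuid',            // hypothetical ids and paths
		workdir: '/tmp/build-workdir',
		githubAppId: 'github-app-cuid',
		repository: 'owner/repo',
		apiUrl: 'https://api.github.com',
		htmlUrl: 'https://github.com',
		branch: 'main',
		buildId: 'build-cuid',
		customPort: 22,
		forPublic: true                       // public repos skip the GitHub App installation-token exchange
	});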
65
apps/server/src/lib/importers/gitlab.ts
Normal file
@@ -0,0 +1,65 @@
import { saveBuildLog } from "../buildPacks/common";
import { executeCommand } from "../executeCommand";

export default async function ({
	applicationId,
	workdir,
	repodir,
	htmlUrl,
	gitCommitHash,
	repository,
	branch,
	buildId,
	privateSshKey,
	customPort,
	forPublic,
	customUser,
}: {
	applicationId: string;
	workdir: string;
	repository: string;
	htmlUrl: string;
	branch: string;
	buildId: string;
	repodir: string;
	gitCommitHash: string;
	privateSshKey: string;
	customPort: number;
	forPublic: boolean;
	customUser: string;
}): Promise<string> {
	const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
	if (!forPublic) {
		await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
		await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
	}

	await saveBuildLog({
		line: `Cloning ${repository}:${branch}...`,
		buildId,
		applicationId
	});
	if (gitCommitHash) {
		await saveBuildLog({
			line: `Checking out ${gitCommitHash} commit...`,
			buildId,
			applicationId
		});
	}
	if (forPublic) {
		await executeCommand({
			command: `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
			shell: true
		});
	} else {
		await executeCommand({
			command: `git clone -q -b ${branch} ${customUser}@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
			shell: true
		});
	}

	const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
	return commit.replace('\n', '');
}
4
apps/server/src/lib/importers/index.ts
Normal file
@@ -0,0 +1,4 @@
import github from './github';
import gitlab from './gitlab';

export { github, gitlab };
@@ -1,6 +1,6 @@
import Bree from 'bree';
import path from 'path';
// import Cabin from 'cabin';
import Cabin from 'cabin';
import TSBree from '@breejs/ts-worker';

export const isDev = process.env['NODE_ENV'] === 'development';
@@ -9,16 +9,8 @@ Bree.extend(TSBree);

const options: any = {
	defaultExtension: 'js',
	logger: false,
	// logger: false,
	// workerMessageHandler: async ({ name, message }) => {
	// 	if (name === 'deployApplication' && message?.deploying) {
	// 		if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
	// 			scheduler.workers.get('deployApplication').postMessage('cancel')
	// 		}
	// 	}
	// },
	jobs: [{ name: 'deployApplication' }, { name: 'worker' }]
	logger: new Cabin({}),
	jobs: [{ name: 'applicationBuildQueue' }]
};
if (isDev) options.root = path.join(__dirname, './jobs');


@@ -64,7 +64,11 @@ export function createServer(opts: ServerOptions) {
		console.log('Coolify server is listening on port', port, 'at 0.0.0.0 🚀');
		const graceful = new Graceful({ brees: [scheduler] });
		graceful.listen();
		scheduler.run('worker');
		setInterval(async () => {
			if (!scheduler.workers.has('applicationBuildQueue')) {
				scheduler.run('applicationBuildQueue');
			}
		}, 2000);
	} catch (err) {
		server.log.error(err);
		process.exit(1);

@@ -24,6 +24,8 @@ import {
	createDirectories,
	decrypt,
	encrypt,
	generateSecrets,
	getContainerUsage,
	getDomain,
	isDev,
	isDomainConfigured,
@@ -32,8 +34,345 @@ import {
} from '../../../lib/common';
import { day } from '../../../lib/dayjs';
import csv from 'csvtojson';
import { scheduler } from '../../../scheduler';

export const applicationsRouter = router({
	deleteApplication: privateProcedure
		.input(
			z.object({
				id: z.string(),
				force: z.boolean().default(false)
			})
		)
		.mutation(async ({ input, ctx }) => {
			const { id, force } = input;
			const teamId = ctx.user.teamId;
			const application = await prisma.application.findUnique({
				where: { id },
				include: { destinationDocker: true }
			});
			if (!force && application?.destinationDockerId && application.destinationDocker?.network) {
				const { stdout: containers } = await executeCommand({
					dockerId: application.destinationDocker.id,
					command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
				});
				if (containers) {
					const containersArray = containers.trim().split('\n');
					for (const container of containersArray) {
						const containerObj = JSON.parse(container);
						const id = containerObj.ID;
						await removeContainer({ id, dockerId: application.destinationDocker.id });
					}
				}
			}
			await prisma.applicationSettings.deleteMany({ where: { application: { id } } });
			await prisma.buildLog.deleteMany({ where: { applicationId: id } });
			await prisma.build.deleteMany({ where: { applicationId: id } });
			await prisma.secret.deleteMany({ where: { applicationId: id } });
			await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } });
			await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: id } });
			if (teamId === '0') {
				await prisma.application.deleteMany({ where: { id } });
			} else {
				await prisma.application.deleteMany({ where: { id, teams: { some: { id: teamId } } } });
			}
			return {}
		}),
	restartPreview: privateProcedure
		.input(
			z.object({
				id: z.string(),
				pullmergeRequestId: z.string()
			})
		)
		.mutation(async ({ input, ctx }) => {
			const { id, pullmergeRequestId } = input;
			const teamId = ctx.user.teamId;
			let application: any = await getApplicationFromDB(id, teamId);
			if (application?.destinationDockerId) {
				const buildId = cuid();
				const { id: dockerId, network } = application.destinationDocker;
				const {
					secrets,
					port,
					repository,
					persistentStorage,
					id: applicationId,
					buildPack,
					exposePort
				} = application;

				let envs = [];
				if (secrets.length > 0) {
					envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, port)];
				}
				const { workdir } = await createDirectories({ repository, buildId });
				const labels = [];
				let image = null;
				const { stdout: container } = await executeCommand({
					dockerId,
					command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'`
				});
				const containersArray = container.trim().split('\n');
				for (const container of containersArray) {
					const containerObj = formatLabelsOnDocker(container);
					image = containerObj[0].Image;
					Object.keys(containerObj[0].Labels).forEach(function (key) {
						if (key.startsWith('coolify')) {
							labels.push(`${key}=${containerObj[0].Labels[key]}`);
						}
					});
				}
				let imageFound = false;
				try {
					await executeCommand({
						dockerId,
						command: `docker image inspect ${image}`
					});
					imageFound = true;
				} catch (error) {
					//
				}
				if (!imageFound) {
					throw { status: 500, message: 'Image not found, cannot restart application.' };
				}

				const volumes =
					persistentStorage?.map((storage) => {
						return `${applicationId}${storage.path.replace(/\//gi, '-')}:${
							buildPack !== 'docker' ? '/app' : ''
						}${storage.path}`;
					}) || [];
				const composeVolumes = volumes.map((volume) => {
					return {
						[`${volume.split(':')[0]}`]: {
							name: volume.split(':')[0]
						}
					};
				});
				const composeFile = {
					version: '3.8',
					services: {
						[`${applicationId}-${pullmergeRequestId}`]: {
							image,
							container_name: `${applicationId}-${pullmergeRequestId}`,
							volumes,
							environment: envs,
							labels,
							depends_on: [],
							expose: [port],
							...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
							...defaultComposeConfiguration(network)
						}
					},
					networks: {
						[network]: {
							external: true
						}
					},
					volumes: Object.assign({}, ...composeVolumes)
				};
				await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
				await executeCommand({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` });
				await executeCommand({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` });
				await executeCommand({
					dockerId,
					command: `docker compose --project-directory ${workdir} up -d`
				});
			}
		}),
	getPreviewStatus: privateProcedure
		.input(
			z.object({
				id: z.string(),
				pullmergeRequestId: z.string()
			})
		)
		.query(async ({ input, ctx }) => {
			const { id, pullmergeRequestId } = input;
			const teamId = ctx.user.teamId;
			let isRunning = false;
			let isExited = false;
			let isRestarting = false;
			let isBuilding = false;
			const application: any = await getApplicationFromDB(id, teamId);
			if (application?.destinationDockerId) {
				const status = await checkContainer({
					dockerId: application.destinationDocker.id,
					container: `${id}-${pullmergeRequestId}`
				});
				if (status?.found) {
					isRunning = status.status.isRunning;
					isExited = status.status.isExited;
					isRestarting = status.status.isRestarting;
				}
				const building = await prisma.build.findMany({
					where: { applicationId: id, pullmergeRequestId, status: { in: ['queued', 'running'] } }
				});
				isBuilding = building.length > 0;
			}
			return {
				success: true,
				data: {
					isBuilding,
					isRunning,
					isRestarting,
					isExited
				}
			};
		}),
	loadPreviews: privateProcedure
		.input(
			z.object({
				id: z.string()
			})
		)
		.mutation(async ({ input, ctx }) => {
			const { id } = input;
			const application = await prisma.application.findUnique({
				where: { id },
				include: { destinationDocker: true }
			});
			const { stdout } = await executeCommand({
				dockerId: application.destinationDocker.id,
				command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"`
			});
			if (stdout === '') {
				throw { status: 500, message: 'No previews found.' };
			}
			const containers = formatLabelsOnDocker(stdout).filter(
				(container) =>
					container.Labels['coolify.configuration'] &&
					container.Labels['coolify.type'] === 'standalone-application'
			);

			const jsonContainers = containers
				.map((container) =>
					JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
				)
				.filter((container) => {
					return container.pullmergeRequestId && container.applicationId === id;
				});
			for (const container of jsonContainers) {
				const found = await prisma.previewApplication.findMany({
					where: {
						applicationId: container.applicationId,
						pullmergeRequestId: container.pullmergeRequestId
					}
				});
				if (found.length === 0) {
					await prisma.previewApplication.create({
						data: {
							pullmergeRequestId: container.pullmergeRequestId,
							sourceBranch: container.branch,
							customDomain: container.fqdn,
							application: { connect: { id: container.applicationId } }
						}
					});
				}
			}
			return {
				success: true,
				data: {
					previews: await prisma.previewApplication.findMany({ where: { applicationId: id } })
				}
			};
		}),
	stopPreview: privateProcedure
		.input(
			z.object({
				id: z.string(),
				pullmergeRequestId: z.string()
			})
		)
		.mutation(async ({ input, ctx }) => {
			const { id, pullmergeRequestId } = input;
			const teamId = ctx.user.teamId;
			const application: any = await getApplicationFromDB(id, teamId);
			if (application?.destinationDockerId) {
				const container = `${id}-${pullmergeRequestId}`;
				const { id: dockerId } = application.destinationDocker;
				const { found } = await checkContainer({ dockerId, container });
				if (found) {
					await removeContainer({ id: container, dockerId: application.destinationDocker.id });
				}
				await prisma.previewApplication.deleteMany({
					where: { applicationId: application.id, pullmergeRequestId }
				});
			}
			return {};
		}),
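A minimal client-side sketch of the preview procedures above. It assumes a tRPC proxy client created elsewhere and this router mounted under `applications`; both names are assumptions, not taken from this commit:

	declare const client: any; // hypothetical tRPC proxy client
	const id = 'app-cuid';
	const pullmergeRequestId = '42';
	await client.applications.restartPreview.mutate({ id, pullmergeRequestId });
	const status = await client.applications.getPreviewStatus.query({ id, pullmergeRequestId });
	const previews = await client.applications.loadPreviews.mutate({ id });
	await client.applications.stopPreview.mutate({ id, pullmergeRequestId });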
	getUsage: privateProcedure
		.input(
			z.object({
				id: z.string(),
				containerId: z.string()
			})
		)
		.query(async ({ input, ctx }) => {
			const { id, containerId } = input;
			const teamId = ctx.user.teamId;
			let usage = {};

			const application: any = await getApplicationFromDB(id, teamId);
			if (application.destinationDockerId) {
				[usage] = await Promise.all([
					getContainerUsage(application.destinationDocker.id, containerId)
				]);
			}
			return {
				success: true,
				data: {
					usage
				}
			};
		}),
	getLocalImages: privateProcedure
		.input(
			z.object({
				id: z.string()
			})
		)
		.query(async ({ input, ctx }) => {
			const { id } = input;
			const teamId = ctx.user.teamId;
			const application: any = await getApplicationFromDB(id, teamId);
			let imagesAvailables = [];
			const { stdout } = await executeCommand({
				dockerId: application.destinationDocker.id,
				command: `docker images --format '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}'`
			});
			const { stdout: runningImage } = await executeCommand({
				dockerId: application.destinationDocker.id,
				command: `docker ps -a --filter 'label=com.docker.compose.service=${id}' --format {{.Image}}`
			});
			const images = stdout
				.trim()
				.split('\n')
				.filter((image) => image.includes(id) && !image.includes('-cache'));
			for (const image of images) {
				const [repository, tag, createdAt] = image.split('#');
				if (tag.includes('-')) {
					continue;
				}
				const [year, time] = createdAt.split(' ');
				imagesAvailables.push({
					repository,
					tag,
					createdAt: day(year + time).unix()
				});
			}

			imagesAvailables = imagesAvailables.sort((a, b) => b.tag - a.tag);

			return {
				success: true,
				data: {
					imagesAvailables,
					runningImage
				}
			};
		}),
	resetQueue: privateProcedure.mutation(async ({ ctx }) => {
		const teamId = ctx.user.teamId;
		if (teamId === '0') {
@@ -41,7 +380,7 @@ export const applicationsRouter = router({
				where: { status: { in: ['queued', 'running'] } },
				data: { status: 'canceled' }
			});
			// scheduler.workers.get("deployApplication").postMessage("cancel");
			// scheduler.workers.get("").postMessage("cancel");
		}
	}),
	cancelBuild: privateProcedure
@@ -813,170 +1152,183 @@ export const applicationsRouter = router({
			}
			return {};
		}),
	restart: privateProcedure.input(z.object({ id: z.string() })).mutation(async ({ ctx, input }) => {
		const { id } = input;
		const teamId = ctx.user?.teamId;
		let application = await getApplicationFromDB(id, teamId);
		if (application?.destinationDockerId) {
			const buildId = cuid();
			const { id: dockerId, network } = application.destinationDocker;
			const {
				dockerRegistry,
				secrets,
				pullmergeRequestId,
				port,
				repository,
				persistentStorage,
				id: applicationId,
				buildPack,
				exposePort
			} = application;
			let location = null;
			const labels = [];
			let image = null;
			const envs = [`PORT=${port}`, 'NODE_ENV=production'];
	restart: privateProcedure
		.input(z.object({ id: z.string(), imageId: z.string().nullable() }))
		.mutation(async ({ ctx, input }) => {
			const { id, imageId } = input;
			const teamId = ctx.user?.teamId;
			let application = await getApplicationFromDB(id, teamId);
			if (application?.destinationDockerId) {
				const buildId = cuid();
				const { id: dockerId, network } = application.destinationDocker;
				const {
					dockerRegistry,
					secrets,
					pullmergeRequestId,
					port,
					repository,
					persistentStorage,
					id: applicationId,
					buildPack,
					exposePort
				} = application;
				let location = null;
				const labels = [];
				let image = null;
				const envs = [`PORT=${port}`, 'NODE_ENV=production'];

				if (secrets.length > 0) {
					secrets.forEach((secret) => {
						if (pullmergeRequestId) {
							const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
							if (isSecretFound.length > 0) {
								if (isSecretFound[0].value.includes('\\n') || isSecretFound[0].value.includes("'")) {
									envs.push(`${secret.name}=${isSecretFound[0].value}`);
				if (secrets.length > 0) {
					secrets.forEach((secret) => {
						if (pullmergeRequestId) {
							const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
							if (isSecretFound.length > 0) {
								if (
									isSecretFound[0].value.includes('\\n') ||
									isSecretFound[0].value.includes("'")
								) {
									envs.push(`${secret.name}=${isSecretFound[0].value}`);
								} else {
									envs.push(`${secret.name}='${isSecretFound[0].value}'`);
								}
							} else {
								envs.push(`${secret.name}='${isSecretFound[0].value}'`);
								if (secret.value.includes('\\n') || secret.value.includes("'")) {
									envs.push(`${secret.name}=${secret.value}`);
								} else {
									envs.push(`${secret.name}='${secret.value}'`);
								}
							}
						} else {
							if (secret.value.includes('\\n') || secret.value.includes("'")) {
								envs.push(`${secret.name}=${secret.value}`);
							} else {
								envs.push(`${secret.name}='${secret.value}'`);
								if (!secret.isPRMRSecret) {
									if (secret.value.includes('\\n') || secret.value.includes("'")) {
										envs.push(`${secret.name}=${secret.value}`);
									} else {
										envs.push(`${secret.name}='${secret.value}'`);
									}
								}
							}
						} else {
							if (!secret.isPRMRSecret) {
								if (secret.value.includes('\\n') || secret.value.includes("'")) {
									envs.push(`${secret.name}=${secret.value}`);
								} else {
									envs.push(`${secret.name}='${secret.value}'`);
					});
				}
				const { workdir } = await createDirectories({ repository, buildId });

				if (imageId) {
					image = imageId;
				} else {
					const { stdout: container } = await executeCommand({
						dockerId,
						command: `docker container ls --filter 'label=com.docker.compose.service=${id}' --format '{{json .}}'`
					});
					const containersArray = container.trim().split('\n');
					for (const container of containersArray) {
						const containerObj = formatLabelsOnDocker(container);
						image = containerObj[0].Image;
						Object.keys(containerObj[0].Labels).forEach(function (key) {
							if (key.startsWith('coolify')) {
								labels.push(`${key}=${containerObj[0].Labels[key]}`);
							}
						});
					}
				}

				if (dockerRegistry) {
					const { url, username, password } = dockerRegistry;
					location = await saveDockerRegistryCredentials({ url, username, password, workdir });
				}

				let imageFoundLocally = false;
				try {
					await executeCommand({
						dockerId,
						command: `docker image inspect ${image}`
					});
					imageFoundLocally = true;
				} catch (error) {
					//
				}
				let imageFoundRemotely = false;
				try {
					await executeCommand({
						dockerId,
						command: `docker ${location ? `--config ${location}` : ''} pull ${image}`
					});
					imageFoundRemotely = true;
				} catch (error) {
					//
				}

				if (!imageFoundLocally && !imageFoundRemotely) {
					throw { status: 500, message: 'Image not found, cannot restart application.' };
				}
				await fs.writeFile(`${workdir}/.env`, envs.join('\n'));

				let envFound = false;
				try {
					envFound = !!(await fs.stat(`${workdir}/.env`));
				} catch (error) {
					//
				}
				const volumes =
					persistentStorage?.map((storage) => {
						return `${applicationId}${storage.path.replace(/\//gi, '-')}:${
							buildPack !== 'docker' ? '/app' : ''
						}${storage.path}`;
					}) || [];
				const composeVolumes = volumes.map((volume) => {
					return {
						[`${volume.split(':')[0]}`]: {
							name: volume.split(':')[0]
						}
					}
				};
			});
		}
		const { workdir } = await createDirectories({ repository, buildId });

		const { stdout: container } = await executeCommand({
			dockerId,
			command: `docker container ls --filter 'label=com.docker.compose.service=${id}' --format '{{json .}}'`
		});
		const containersArray = container.trim().split('\n');
		for (const container of containersArray) {
			const containerObj = formatLabelsOnDocker(container);
			image = containerObj[0].Image;
			Object.keys(containerObj[0].Labels).forEach(function (key) {
				if (key.startsWith('coolify')) {
					labels.push(`${key}=${containerObj[0].Labels[key]}`);
				}
			});
		}
		if (dockerRegistry) {
			const { url, username, password } = dockerRegistry;
			location = await saveDockerRegistryCredentials({ url, username, password, workdir });
		}

		let imageFoundLocally = false;
		try {
			await executeCommand({
				dockerId,
				command: `docker image inspect ${image}`
			});
			imageFoundLocally = true;
		} catch (error) {
			//
		}
		let imageFoundRemotely = false;
		try {
			await executeCommand({
				dockerId,
				command: `docker ${location ? `--config ${location}` : ''} pull ${image}`
			});
			imageFoundRemotely = true;
		} catch (error) {
			//
		}

		if (!imageFoundLocally && !imageFoundRemotely) {
			throw { status: 500, message: 'Image not found, cannot restart application.' };
		}
		await fs.writeFile(`${workdir}/.env`, envs.join('\n'));

		let envFound = false;
		try {
			envFound = !!(await fs.stat(`${workdir}/.env`));
		} catch (error) {
			//
		}
		const volumes =
			persistentStorage?.map((storage) => {
				return `${applicationId}${storage.path.replace(/\//gi, '-')}:${
					buildPack !== 'docker' ? '/app' : ''
				}${storage.path}`;
			}) || [];
		const composeVolumes = volumes.map((volume) => {
			return {
				[`${volume.split(':')[0]}`]: {
					name: volume.split(':')[0]
				}
				const composeFile = {
					version: '3.8',
					services: {
						[applicationId]: {
							image,
							container_name: applicationId,
							volumes,
							env_file: envFound ? [`${workdir}/.env`] : [],
							labels,
							depends_on: [],
							expose: [port],
							...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
							...defaultComposeConfiguration(network)
						}
					},
					networks: {
						[network]: {
							external: true
						}
					},
					volumes: Object.assign({}, ...composeVolumes)
				};
			});
		const composeFile = {
			version: '3.8',
			services: {
				[applicationId]: {
					image,
					container_name: applicationId,
					volumes,
					env_file: envFound ? [`${workdir}/.env`] : [],
					labels,
					depends_on: [],
					expose: [port],
					...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
					...defaultComposeConfiguration(network)
				}
			},
			networks: {
				[network]: {
					external: true
				}
			},
			volumes: Object.assign({}, ...composeVolumes)
		};
		await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
		try {
			await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` });
			await executeCommand({ dockerId, command: `docker rm ${id}` });
		} catch (error) {
			//
		}
				await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
				try {
					await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` });
					await executeCommand({ dockerId, command: `docker rm ${id}` });
				} catch (error) {
					//
				}

				await executeCommand({
					dockerId,
					command: `docker compose --project-directory ${workdir} up -d`
				});
			}
			return {};
		}),
		await executeCommand({
			dockerId,
			command: `docker compose --project-directory ${workdir} up -d`
		});
	}
	return {};
	}),
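Continuing the client sketch from earlier (same assumed client and mount name): the reworked restart input pairs naturally with getLocalImages — passing imageId null restarts the currently running image, while a repository:tag string rolls to a specific local image:

	const { data } = await client.applications.getLocalImages.query({ id });
	await client.applications.restart.mutate({ id, imageId: null });
	await client.applications.restart.mutate({
		id,
		imageId: `${data.imagesAvailables[0].repository}:${data.imagesAvailables[0].tag}` // hypothetical pick
	});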
	deploy: privateProcedure
		.input(
			z.object({
				id: z.string()
				id: z.string(),
				forceRebuild: z.boolean().default(false),
				pullmergeRequestId: z.string().nullable(),
				branch: z.string().nullable()
			})
		)
		.mutation(async ({ ctx, input }) => {
			const { id } = input;
			const { id, pullmergeRequestId, branch, forceRebuild } = input;
			const teamId = ctx.user?.teamId;
			const buildId = await deployApplication(id, teamId);
			const buildId = await deployApplication(id, teamId, forceRebuild, pullmergeRequestId, branch);
			return {
				buildId
			};
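Client-side sketch of the widened deploy input (same assumed client and mount name as the earlier sketches; values hypothetical):

	// Manual deploy of the configured branch:
	await client.applications.deploy.mutate({ id, forceRebuild: false, pullmergeRequestId: null, branch: null });
	// Preview deploy for a pull/merge request:
	await client.applications.deploy.mutate({ id, forceRebuild: true, pullmergeRequestId: '42', branch: 'feature/login' });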
@@ -7,7 +7,9 @@ import { prisma } from '../../../prisma';
export async function deployApplication(
	id: string,
	teamId: string,
	forceRebuild: boolean = false
	forceRebuild: boolean,
	pullmergeRequestId: string | null = null,
	branch: string | null = null
): Promise<string | Error> {
	const buildId = cuid();
	const application = await getApplicationFromDB(id, teamId);
@@ -29,14 +31,20 @@ export async function deployApplication(
			data: {
				id: buildId,
				applicationId: id,
				sourceBranch: branch,
				branch: application.branch,
				pullmergeRequestId: pullmergeRequestId?.toString(),
				forceRebuild,
				destinationDockerId: application.destinationDocker?.id,
				gitSourceId: application.gitSource?.id,
				githubAppId: application.gitSource?.githubApp?.id,
				gitlabAppId: application.gitSource?.gitlabApp?.id,
				status: 'queued',
				type: 'manual'
				type: pullmergeRequestId
					? application.gitSource?.githubApp?.id
						? 'manual_pr'
						: 'manual_mr'
					: 'manual'
			}
		});
	} else {
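And a sketch of the extended deployApplication signature in use (ids hypothetical); the build `type` recorded depends on whether a pullmergeRequestId is passed and which git app the source uses:

	await deployApplication(id, teamId, false);                        // recorded as type 'manual'
	await deployApplication(id, teamId, true, '42', 'feature/login');  // 'manual_pr' (GitHub App) or 'manual_mr' (GitLab)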