fix: security hole
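In short: call sites that interpolated untrusted values (container names, passwords, branch names) into a shell command line via `asyncExecShell` or `executeDockerCmd` now go through the new `executeCommand()`, which tokenizes the command with `shell-quote` and runs it with `execa` without a shell; `shell: true` remains as an explicit opt-in for commands that genuinely need pipes or `&&`. A minimal sketch of the difference, assuming execa and shell-quote as used in the diff below; `unsafeStop`/`safeStop` and the example value are illustrative only, not part of the commit:

```ts
import { execa, execaCommand } from 'execa';
import { parse } from 'shell-quote';

// Untrusted value that ends up inside a command string
// (e.g. a container name coming from the database).
const container = 'app; rm -rf /important';

// Old pattern (simplified): the whole string is handed to a shell,
// so the `;` splits it and `rm -rf /important` actually runs.
async function unsafeStop() {
	await execaCommand(`docker stop -t 0 ${container}`, { shell: true });
}

// New pattern (what executeCommand() does for non-shell calls):
// tokenize with shell-quote, then run the binary directly with an
// argv array. No shell is involved, so `;`, `&&`, `$(...)` are never
// interpreted; the injected text is just passed along as arguments.
async function safeStop() {
	// parse() returns strings for plain tokens and objects for shell
	// operators such as `;` — this sketch simply drops the operators.
	const argv = parse(`docker stop -t 0 ${container}`)
		.filter((token): token is string => typeof token === 'string');
	const [file, ...args] = argv;
	await execa(file, args, { env: { DOCKER_BUILDKIT: '1' } });
}
```

Call sites that still need a real shell (e.g. `docker stop ... && docker rm ...`, piped cleanup commands) opt in explicitly with `shell: true`, as the hunks below show.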
@@ -1,4 +1,4 @@
-import { base64Encode, decrypt, encrypt, executeDockerCmd, generateTimestamp, getDomain, isARM, isDev, prisma, version } from "../common";
+import { base64Encode, decrypt, encrypt, executeCommand, generateTimestamp, getDomain, isARM, isDev, prisma, version } from "../common";
 import { promises as fs } from 'fs';
 import { day } from "../dayjs";

@@ -656,7 +656,7 @@ export async function buildImage({
 location = await saveDockerRegistryCredentials({ url, username, password, workdir })
 }
-await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker ${location ? `--config ${location}` : ''} build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })
+await executeCommand({ debug, buildId, applicationId, dockerId, command: `docker ${location ? `--config ${location}` : ''} build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })

 const { status } = await prisma.build.findUnique({ where: { id: buildId } })
 if (status === 'canceled') {
@@ -1,5 +1,5 @@
 import { promises as fs } from 'fs';
-import { defaultComposeConfiguration, executeDockerCmd } from '../common';
+import { defaultComposeConfiguration, executeCommand } from '../common';
 import { buildImage, saveBuildLog } from './common';
 import yaml from 'js-yaml';

@@ -108,8 +108,8 @@ export default async function (data) {
 }
 dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } })
 await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
-await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} pull` })
+await executeCommand({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} pull` })
 await saveBuildLog({ line: 'Pulling images from Compose file', buildId, applicationId });
-await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} build --progress plain` })
+await executeCommand({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} build --progress plain` })
 await saveBuildLog({ line: 'Building images from Compose file', buildId, applicationId });
 }
@@ -1,11 +1,11 @@
-import { executeDockerCmd, prisma } from "../common"
+import { executeCommand } from "../common"
 import { saveBuildLog } from "./common";

 export default async function (data: any): Promise<void> {
 const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
 try {
 await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
-await executeDockerCmd({
+await executeCommand({
 buildId,
 debug,
 dockerId,
@@ -1,6 +1,6 @@
 import { promises as fs } from 'fs';
 import TOML from '@iarna/toml';
-import { asyncExecShell } from '../common';
+import { executeCommand } from '../common';
 import { buildCacheImageWithCargo, buildImage } from './common';

 const createDockerfile = async (data, image, name): Promise<void> => {
@@ -28,7 +28,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
 export default async function (data) {
 try {
 const { workdir, baseImage, baseBuildImage } = data;
-const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`);
+const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
 const parsedToml: any = TOML.parse(cargoToml);
 const name = parsedToml.package.name;
 await buildCacheImageWithCargo(data, baseBuildImage);
@@ -17,6 +17,7 @@ import { checkContainer, removeContainer } from './docker';
 import { day } from './dayjs';
 import { saveBuildLog, saveDockerRegistryCredentials } from './buildPacks/common';
 import { scheduler } from './scheduler';
+import type { ExecaChildProcess } from 'execa';

 export const version = '3.12.0';
 export const isDev = process.env.NODE_ENV === 'development';
@@ -63,7 +64,6 @@ const otherTraefikEndpoint = isDev
 : 'http://coolify:3000/webhooks/traefik/other.json';

 export const uniqueName = (): string => uniqueNamesGenerator(customConfig);
-export const asyncExecShell = util.promisify(exec);
 export const asyncExecShellStream = async ({
 debug,
 buildId,
@@ -303,7 +303,7 @@ export async function isDomainConfigured({

 export async function getContainerUsage(dockerId: string, container: string): Promise<any> {
 try {
-const { stdout } = await executeDockerCmd({
+const { stdout } = await executeCommand({
 dockerId,
 command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"`
 });
@@ -508,36 +508,13 @@ export async function createRemoteEngineConfiguration(id: string) {
 remoteUser
 } = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } });
 await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 });
-// Needed for remote docker compose
-// const { stdout: numberOfSSHAgentsRunning } = await asyncExecShell(
-// `ps ax | grep [s]sh-agent | grep coolify-ssh-agent.pid | grep -v grep | wc -l`
-// );
-// if (numberOfSSHAgentsRunning !== '' && Number(numberOfSSHAgentsRunning.trim()) == 0) {
-// try {
-// await fs.stat(`/tmp/coolify-ssh-agent.pid`);
-// await fs.rm(`/tmp/coolify-ssh-agent.pid`);
-// } catch (error) { }
-// await asyncExecShell(`eval $(ssh-agent -sa /tmp/coolify-ssh-agent.pid)`);
-// }
-// await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh-add -q ${sshKeyFile}`);
-
-// const { stdout: numberOfSSHTunnelsRunning } = await asyncExecShell(
-// `ps ax | grep 'ssh -F /dev/null -o StrictHostKeyChecking no -fNL ${localPort}:localhost:${remotePort}' | grep -v grep | wc -l`
-// );
-// if (numberOfSSHTunnelsRunning !== '' && Number(numberOfSSHTunnelsRunning.trim()) == 0) {
-// try {
-// await asyncExecShell(
-// `SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh -F /dev/null -o "StrictHostKeyChecking no" -fNL ${localPort}:localhost:${remotePort} ${remoteUser}@${remoteIpAddress}`
-// );
-// } catch (error) { }
-// }
 const config = sshConfig.parse('');
 const Host = `${remoteIpAddress}-remote`

 try {
-await asyncExecShell(`ssh-keygen -R ${Host}`);
-await asyncExecShell(`ssh-keygen -R ${remoteIpAddress}`);
-await asyncExecShell(`ssh-keygen -R localhost:${localPort}`);
+await executeCommand({ command: `ssh-keygen -R ${Host}` });
+await executeCommand({ command: `ssh-keygen -R ${remoteIpAddress}` });
+await executeCommand({ command: `ssh-keygen -R localhost:${localPort}` });
 } catch (error) { }

@@ -566,8 +543,102 @@ export async function createRemoteEngineConfiguration(id: string) {
 }
 return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config));
 }
+export async function executeCommand({ command, dockerId = null, sshCommand = false, shell = false, buildId, applicationId, debug }: { command: string, sshCommand?: boolean, shell?: boolean, dockerId?: string, buildId?: string, applicationId?: string, debug?: boolean }): Promise<ExecaChildProcess<string>> {
+const { execa, execaCommand } = await import('execa')
+const { parse } = await import('shell-quote')
+const parsedCommand = parse(command);
+const dockerCommand = parsedCommand[0];
+const dockerArgs = parsedCommand.slice(1);
+
+if (dockerId) {
+let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
+if (remoteEngine) {
+await createRemoteEngineConfiguration(dockerId);
+engine = `ssh://${remoteIpAddress}-remote`;
+} else {
+engine = 'unix:///var/run/docker.sock';
+}
+if (process.env.CODESANDBOX_HOST) {
+if (command.startsWith('docker compose')) {
+command = command.replace(/docker compose/gi, 'docker-compose');
+}
+}
+if (sshCommand) {
+if (shell) {
+return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`, { shell: true, stdio: 'inherit' });
+}
+return await execa('ssh', [`${remoteIpAddress}-remote`, ...dockerArgs]);
+}
+return await new Promise(async (resolve, reject) => {
+let subprocess = null;
+if (shell) {
+subprocess = execaCommand(command, {
+env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
+});
+} else {
+subprocess = execa(dockerCommand, dockerArgs, {
+env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
+});
+}
+const logs = [];
+subprocess.stdout.on('data', async (data) => {
+const stdout = data.toString();
+const array = stdout.split('\n');
+for (const line of array) {
+if (line !== '\n' && line !== '') {
+const log = {
+line: `${line.replace('\n', '')}`,
+buildId,
+applicationId
+}
+logs.push(log);
+if (debug) {
+await saveBuildLog(log);
+}
+}
+}
+});
+subprocess.stderr.on('data', async (data) => {
+const stderr = data.toString();
+const array = stderr.split('\n');
+for (const line of array) {
+if (line !== '\n' && line !== '') {
+const log = {
+line: `${line.replace('\n', '')}`,
+buildId,
+applicationId
+}
+logs.push(log);
+if (debug) {
+await saveBuildLog(log);
+}
+}
+}
+});
+subprocess.on('exit', async (code) => {
+await asyncSleep(1000);
+if (code === 0) {
+resolve(code);
+} else {
+if (!debug) {
+for (const log of logs) {
+await saveBuildLog(log);
+}
+}
+reject(code);
+}
+});
+})
+} else {
+if (shell) {
+return execaCommand(command, { shell: true });
+}
+return await execa(dockerCommand, dockerArgs);
+}
+}
 export async function executeSSHCmd({ dockerId, command }) {
-const { execaCommand } = await import('execa')
+const { execaCommand, execa } = await import('execa')
+const { parse } = await import('shell-quote')
 let { remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
 if (remoteEngine) {
 await createRemoteEngineConfiguration(dockerId)
@@ -577,10 +648,12 @@ export async function executeSSHCmd({ dockerId, command }) {
 command = command.replace(/docker compose/gi, 'docker-compose')
 }
 }
-return await execaCommand(`ssh ${remoteIpAddress}-remote ${command}`)
+const dockerArgs = parse(command);
+return await execa('ssh', [`${remoteIpAddress}-remote`, ...dockerArgs]);
 }
 export async function executeDockerCmd({ debug, buildId, applicationId, dockerId, command }: { debug?: boolean, buildId?: string, applicationId?: string, dockerId: string, command: string }): Promise<any> {
-const { execaCommand } = await import('execa')
+const { execa } = await import('execa')
+const { parse } = await import('shell-quote')
 let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
 if (remoteEngine) {
 await createRemoteEngineConfiguration(dockerId);
@@ -593,10 +666,13 @@ export async function executeDockerCmd({ debug, buildId, applicationId, dockerId
 command = command.replace(/docker compose/gi, 'docker-compose');
 }
 }
+const parsedCommand = parse(command);
+const dockerCommand = parsedCommand[0];
+const dockerArgs = parsedCommand.slice(1);
 if (command.startsWith(`docker build`) || command.startsWith(`pack build`) || command.startsWith(`docker compose build`)) {
 return await asyncExecShellStream({ debug, buildId, applicationId, command, engine });
 }
-return await execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }, shell: true })
+return await execa(dockerCommand, dockerArgs, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine } });
 }
 export async function startTraefikProxy(id: string): Promise<void> {
 const { engine, network, remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id } })
@@ -604,18 +680,18 @@ export async function startTraefikProxy(id: string): Promise<void> {
 const { id: settingsId, ipv4, ipv6 } = await listSettings();

 if (!found) {
-const { stdout: coolifyNetwork } = await executeDockerCmd({
+const { stdout: coolifyNetwork } = await executeCommand({
 dockerId: id,
 command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`
 });

 if (!coolifyNetwork) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: id,
 command: `docker network create --attachable coolify-infra`
 });
 }
-const { stdout: Config } = await executeDockerCmd({
+const { stdout: Config } = await executeCommand({
 dockerId: id,
 command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
 });
@@ -630,7 +706,7 @@ export async function startTraefikProxy(id: string): Promise<void> {
 }
 traefikUrl = `${ip}/webhooks/traefik/remote/${id}`;
 }
-await executeDockerCmd({
+await executeCommand({
 dockerId: id,
 command: `docker run --restart always \
 --add-host 'host.docker.internal:host-gateway' \
@@ -655,7 +731,6 @@ export async function startTraefikProxy(id: string): Promise<void> {
 --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web \
 --log.level=error`
 });
-await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } });
 await prisma.destinationDocker.update({
 where: { id },
 data: { isCoolifyProxyUsed: true }
@@ -679,13 +754,13 @@ export async function startTraefikProxy(id: string): Promise<void> {

 export async function configureNetworkTraefikProxy(destination: any): Promise<void> {
 const { id } = destination;
-const { stdout: networks } = await executeDockerCmd({
+const { stdout: networks } = await executeCommand({
 dockerId: id,
 command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'`
 });
 const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(',');
 if (!configuredNetworks.includes(destination.network)) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destination.id,
 command: `docker network connect ${destination.network} coolify-proxy`
 });
@@ -700,13 +775,12 @@ export async function stopTraefikProxy(
 where: { id },
 data: { isCoolifyProxyUsed: false }
 });
-const { id: settingsId } = await prisma.setting.findFirst({});
-await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } });
 try {
 if (found) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: id,
-command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`
+command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`,
+shell: true
 });
 }
 } catch (error) {
@@ -1099,9 +1173,9 @@ export const createDirectories = async ({
 workdirFound = !!(await fs.stat(workdir));
 } catch (error) { }
 if (workdirFound) {
-await asyncExecShell(`rm -fr ${workdir}`);
+await executeCommand({ command: `rm -fr ${workdir}` });
 }
-await asyncExecShell(`mkdir -p ${workdir}`);
+await executeCommand({ command: `mkdir -p ${workdir}` });
 return {
 workdir,
 repodir
@@ -1117,7 +1191,7 @@ export async function stopDatabaseContainer(database: any): Promise<boolean> {
 } = database;
 if (destinationDockerId) {
 try {
-const { stdout } = await executeDockerCmd({
+const { stdout } = await executeCommand({
 dockerId,
 command: `docker inspect --format '{{json .State}}' ${id}`
 });
@@ -1145,9 +1219,10 @@ export async function stopTcpHttpProxy(
 const { found } = await checkContainer({ dockerId, container });
 try {
 if (found) {
-return await executeDockerCmd({
+return await executeCommand({
 dockerId,
-command: `docker stop -t 0 ${container} && docker rm ${container}`
+command: `docker stop -t 0 ${container} && docker rm ${container}`,
+shell: true
 });
 }
 } catch (error) {
@@ -1169,34 +1244,34 @@ export async function updatePasswordInDb(database, user, newPassword, isRoot) {
 } = database;
 if (destinationDockerId) {
 if (type === 'mysql') {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';\"`
 });
 } else if (type === 'mariadb') {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"SET PASSWORD FOR '${user}'@'%' = PASSWORD('${newPassword}');\"`
 });
 } else if (type === 'postgresql') {
 if (isRoot) {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker exec ${id} psql postgresql://postgres:${rootUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role postgres WITH PASSWORD '${newPassword}'"`
 });
 } else {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker exec ${id} psql postgresql://${dbUser}:${dbUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role ${user} WITH PASSWORD '${newPassword}'"`
 });
 }
 } else if (type === 'mongodb') {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker exec ${id} mongo 'mongodb://${rootUser}:${rootUserPassword}@${id}:27017/admin?readPreference=primary&ssl=false' --eval "db.changeUserPassword('${user}','${newPassword}')"`
 });
 } else if (type === 'redis') {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker exec ${id} redis-cli -u redis://${dbUserPassword}@${id}:6379 --raw CONFIG SET requirepass ${newPassword}`
 });
@@ -1370,7 +1445,7 @@ export async function startTraefikTCPProxy(
 });
 try {
 if (foundDependentContainer && !found) {
-const { stdout: Config } = await executeDockerCmd({
+const { stdout: Config } = await executeCommand({
 dockerId,
 command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
 });
@@ -1417,16 +1492,17 @@ export async function startTraefikTCPProxy(
 }
 };
 await fs.writeFile(`/tmp/docker-compose-${id}.yaml`, yaml.dump(tcpProxy));
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d`
 });
 await fs.rm(`/tmp/docker-compose-${id}.yaml`);
 }
 if (!foundDependentContainer && found) {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
-command: `docker stop -t 0 ${container} && docker rm ${container}`
+command: `docker stop -t 0 ${container} && docker rm ${container}`,
+shell: true
 });
 }
 } catch (error) {
@@ -1537,7 +1613,7 @@ export async function stopBuild(buildId, applicationId) {
 await cleanupDB(buildId, applicationId);
 return reject(new Error('Canceled.'));
 }
-const { stdout: buildContainers } = await executeDockerCmd({
+const { stdout: buildContainers } = await executeCommand({
 dockerId,
 command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'`
 });
@@ -1580,26 +1656,28 @@ export function convertTolOldVolumeNames(type) {
 export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
 // Cleanup old coolify images
 try {
-let { stdout: images } = await executeDockerCmd({
+let { stdout: images } = await executeCommand({
 dockerId,
-command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`
+command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`,
+shell: true
 });

 images = images.trim();
 if (images) {
-await executeDockerCmd({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r` });
+await executeCommand({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r`, shell: true });
 }
 } catch (error) { }
 if (lowDiskSpace || force) {
 // Cleanup images that are not used
 try {
-await executeDockerCmd({ dockerId, command: `docker image prune -f` });
+await executeCommand({ dockerId, command: `docker image prune -f` });
 } catch (error) { }

 const { numberOfDockerImagesKeptLocally } = await prisma.setting.findUnique({ where: { id: '0' } })
-const { stdout: images } = await executeDockerCmd({
+const { stdout: images } = await executeCommand({
 dockerId,
-command: `docker images | grep -v "<none>" | grep -v REPOSITORY | awk '{print $1, $2}'`
+command: `docker images | grep -v "<none>" | grep -v REPOSITORY | awk '{print $1, $2}'`,
+shell: true
 });
 const imagesArray = images.trim().replaceAll(' ', ':').split('\n');
 const imagesSet = new Set(imagesArray.map((image) => image.split(':')[0]));
@@ -1618,12 +1696,12 @@ export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
 }
 }
 for (const image of deleteImage) {
-await executeDockerCmd({ dockerId, command: `docker image rm -f ${image}` });
+await executeCommand({ dockerId, command: `docker image rm -f ${image}` });
 }

 // Prune coolify managed containers
 try {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker container prune -f --filter "label=coolify.managed=true"`
 });
@@ -1631,7 +1709,7 @@ export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {

 // Cleanup build caches
 try {
-await executeDockerCmd({ dockerId, command: `docker builder prune -a -f` });
+await executeCommand({ dockerId, command: `docker builder prune -a -f` });
 } catch (error) { }
 }
 }
@@ -1718,11 +1796,11 @@ export async function pushToRegistry(application: any, workdir: string, tag: str
 const location = `${workdir}/.docker`
 const tagCommand = `docker tag ${application.id}:${tag} ${imageName}:${customTag}`
 const pushCommand = `docker --config ${location} push ${imageName}:${customTag}`
-await executeDockerCmd({
+await executeCommand({
 dockerId: application.destinationDockerId,
 command: tagCommand
 })
-await executeDockerCmd({
+await executeCommand({
 dockerId: application.destinationDockerId,
 command: pushCommand
 })
@@ -1,4 +1,4 @@
-import { executeDockerCmd } from './common';
+import { executeCommand } from './common';

 export function formatLabelsOnDocker(data) {
 return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => {
@@ -16,7 +16,7 @@ export function formatLabelsOnDocker(data) {
 export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
 let containerFound = false;
 try {
-const { stdout } = await executeDockerCmd({
+const { stdout } = await executeCommand({
 dockerId,
 command:
 `docker inspect --format '{{json .State}}' ${container}`
@@ -28,14 +28,14 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 const isRestarting = status === 'restarting'
 const isExited = status === 'exited'
 if (status === 'created') {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command:
 `docker rm ${container}`
 });
 }
 if (remove && status === 'exited') {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command:
 `docker rm ${container}`
@@ -62,7 +62,7 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
 let isExited = false;
 try {
-const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
+const { stdout } = await executeCommand({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
 if (stdout.trim() === 'exited') {
 isExited = true;
 }
@@ -81,13 +81,13 @@ export async function removeContainer({
 dockerId: string;
 }): Promise<void> {
 try {
-const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
+const { stdout } = await executeCommand({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
 if (JSON.parse(stdout).Running) {
-await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
-await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
+await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
+await executeCommand({ dockerId, command: `docker rm ${id}` })
 }
 if (JSON.parse(stdout).Status === 'exited') {
-await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
+await executeCommand({ dockerId, command: `docker rm ${id}` })
 }
 } catch (error) {
 throw error;
@@ -1,7 +1,7 @@

 import jsonwebtoken from 'jsonwebtoken';
 import { saveBuildLog } from '../buildPacks/common';
-import { asyncExecShell, decrypt, prisma } from '../common';
+import { decrypt, executeCommand, prisma } from '../common';

 export default async function ({
 applicationId,
@@ -43,9 +43,11 @@ export default async function ({
 applicationId
 });
 }
-await asyncExecShell(
-`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `
-);
+await executeCommand({
+command:
+`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
+shell: true
+});

 } else {
 const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
@@ -81,11 +83,13 @@ export default async function ({
 applicationId
 });
 }
-await asyncExecShell(
-`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `
-);
+await executeCommand({
+command:
+`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
+shell: true
+});
 }
-const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
+const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
 console.log({ commit })
 return commit.replace('\n', '');
 }
@@ -1,5 +1,5 @@
 import { saveBuildLog } from "../buildPacks/common";
-import { asyncExecShell } from "../common";
+import { executeCommand } from "../common";

 export default async function ({
 applicationId,
@@ -28,8 +28,8 @@ export default async function ({
 }): Promise<string> {
 const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
 if (!forPublic) {
-await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
-await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
+await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
+await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
 }

 await saveBuildLog({
@@ -45,15 +45,19 @@ export default async function ({
 });
 }
 if (forPublic) {
-await asyncExecShell(
-`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `
+await executeCommand({
+command:
+`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
+}
 );
 } else {
-await asyncExecShell(
-`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `
+await executeCommand({
+command:
+`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
+}
 );
 }

-const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
+const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
 return commit.replace('\n', '');
 }
@@ -2,7 +2,7 @@ import type { FastifyReply, FastifyRequest } from 'fastify';
 import fs from 'fs/promises';
 import yaml from 'js-yaml';
 import path from 'path';
-import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeDockerCmd, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
+import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeCommand, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
 import { parseAndFindServiceTemplates } from '../../routes/api/v1/services/handlers';

 import { ServiceStartStop } from '../../routes/api/v1/services/types';
@@ -15,14 +15,19 @@ export async function stopService(request: FastifyRequest<ServiceStartStop>) {
 const teamId = request.user.teamId;
 const { destinationDockerId } = await getServiceFromDB({ id, teamId });
 if (destinationDockerId) {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-})
-await executeDockerCmd({
-dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
+command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}`
 })
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 return {}
 }
 throw { status: 500, message: 'Could not stop containers.' }
@@ -182,19 +187,36 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
 // Workaround: Stop old minio proxies
 if (service.type === 'minio') {
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDocker.id,
 command:
-`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container stop -t 0`
+`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
 });

+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 } catch (error) { }
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDocker.id,
 command:
-`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container rm -f`
+`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
 });
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 } catch (error) { }
 }
 return {}
@@ -205,16 +227,16 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
 async function startServiceContainers(fastify, id, teamId, dockerId, composeFileDestination) {
 try {
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Pulling images...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
 } catch (error) { }
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Building images...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Creating containers...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Starting containers...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
 await asyncSleep(1000);
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 0 })
 }
 export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
@@ -226,7 +248,7 @@ export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply:
 destinationDocker,
 } = await getServiceFromDB({ id, teamId });
 if (destinationDockerId) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
 command: `docker exec ${id} migrate`
 })