Features:
- Rust support 🦀 (Thanks to @pepoviola)
- Added a default rewrite rule for PHP apps (requests fall back to index.php; see the example after the fixes list)
- Upgrades can now be controlled in a straightforward way

Fixes:
- Improved upgrade scripts
- Simplified prechecks before deployment
- Fixed path-based deployments
- Fixed redirections for already defined apps
- Better error handling - still needs a lot of improvement here!
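
The default PHP rewrite rule is written as an .htaccess file into the app's working directory during deployment; every request that does not resolve to an existing file or directory falls back to index.php:

RewriteEngine On
RewriteBase /
RewriteCond %{REQUEST_FILENAME} !-d
RewriteCond %{REQUEST_FILENAME} !-f
RewriteRule ^(.+)$ index.php [QSA,L]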
Author: Andras Bacsai (committed via GitHub)
Date: 2021-04-15 22:40:44 +02:00
Commit: bad84289c4 (parent 166a573392)
56 changed files with 899 additions and 661 deletions


@@ -1,3 +1,4 @@
 node_modules
 dist
 .routify
+.pnpm-store

.gitignore (vendored)

@@ -9,3 +9,4 @@ yarn-error.log
 api/development/console.log
 .pnpm-debug.log
 yarn.lock
+.pnpm-store


@@ -12,6 +12,7 @@ module.exports = async function (fastify, opts) {
   server.register(require('./routes/v1/application/deploy'), { prefix: '/application/deploy' })
   server.register(require('./routes/v1/application/deploy/logs'), { prefix: '/application/deploy/logs' })
   server.register(require('./routes/v1/databases'), { prefix: '/databases' })
+  server.register(require('./routes/v1/server'), { prefix: '/server' })
 })
 // Public routes
 fastify.register(require('./routes/v1/verify'), { prefix: '/verify' })


@@ -0,0 +1,19 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
module.exports = async function (configuration) {
try {
const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
if (fs.stat(`${path}/Dockerfile`)) {
const stream = await docker.engine.buildImage(
{ src: ['.'], context: path },
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
)
await streamEvents(stream, configuration)
} else {
throw { error: 'No custom dockerfile found.', type: 'app' }
}
} catch (error) {
throw { error, type: 'server' }
}
}
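
For reference, fs.promises.stat resolves with a Stats object when the file exists and rejects (ENOENT) when it does not, so an awaited Dockerfile check can be sketched like this (illustrative only, not part of the commit):

const fs = require('fs').promises

// Resolves to true only when <path>/Dockerfile exists; stat rejects otherwise.
async function hasDockerfile (path) {
  try {
    await fs.stat(`${path}/Dockerfile`)
    return true
  } catch (error) {
    return false
  }
}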


@@ -10,12 +10,16 @@ const buildImageNodeDocker = (configuration) => {
   ].join('\n')
 }
 async function buildImage (configuration) {
+  try {
     await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, buildImageNodeDocker(configuration))
     const stream = await docker.engine.buildImage(
       { src: ['.'], context: configuration.general.workdir },
       { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
     )
     await streamEvents(stream, configuration)
+  } catch (error) {
+    throw { error, type: 'server' }
+  }
 }
 module.exports = {


@@ -2,5 +2,6 @@ const static = require('./static')
 const nodejs = require('./nodejs')
 const php = require('./php')
 const custom = require('./custom')
+const rust = require('./rust')
-module.exports = { static, nodejs, php, custom }
+module.exports = { static, nodejs, php, custom, rust }
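
The registry is keyed by configuration.build.pack; a sketch of how the build step presumably dispatches on it, paraphrasing the call site in libs/applications/build/container (only a fragment of that file appears in this diff):

const packs = require('../../../buildPacks')

// Inside the async build step: pick the buildpack (static, nodejs, php, custom or rust) by name and run it.
const buildPack = packs[configuration.build.pack]
if (buildPack) {
  await buildPack(configuration)
} else {
  throw { error: 'No buildpack found.', type: 'app' }
}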


@@ -1,7 +1,7 @@
 const fs = require('fs').promises
 const { buildImage } = require('../helpers')
 const { streamEvents, docker } = require('../../libs/docker')
+// `HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost:${configuration.publish.port}${configuration.publish.path} || exit 1`,
 const publishNodejsDocker = (configuration) => {
   return [
     'FROM node:lts',
@@ -16,11 +16,15 @@ const publishNodejsDocker = (configuration) => {
 }
 module.exports = async function (configuration) {
+  try {
     if (configuration.build.command.build) await buildImage(configuration)
     await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishNodejsDocker(configuration))
     const stream = await docker.engine.buildImage(
       { src: ['.'], context: configuration.general.workdir },
       { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
     )
     await streamEvents(stream, configuration)
+  } catch (error) {
+    throw { error, type: 'server' }
+  }
 }


@@ -0,0 +1,26 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
const publishPHPDocker = (configuration) => {
return [
'FROM php:apache',
'RUN a2enmod rewrite',
'WORKDIR /usr/src/app',
`COPY .${configuration.build.directory} /var/www/html`,
'EXPOSE 80',
' CMD ["apache2-foreground"]'
].join('\n')
}
module.exports = async function (configuration) {
try {
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
const stream = await docker.engine.buildImage(
{ src: ['.'], context: configuration.general.workdir },
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
)
await streamEvents(stream, configuration)
} catch (error) {
throw { error, type: 'server' }
}
}


@@ -0,0 +1,64 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
const { execShellAsync } = require('../../libs/common')
const TOML = require('@iarna/toml')
const publishRustDocker = (configuration, custom) => {
return [
'FROM rust:latest',
'WORKDIR /app',
`COPY --from=${configuration.build.container.name}:cache /app/target target`,
`COPY --from=${configuration.build.container.name}:cache /usr/local/cargo /usr/local/cargo`,
'COPY . .',
`RUN cargo build --release --bin ${custom.name}`,
'FROM debian:buster-slim',
'WORKDIR /app',
'RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*',
'RUN update-ca-certificates',
`COPY --from=${configuration.build.container.name}:cache /app/target/release/${custom.name} ${custom.name}`,
`EXPOSE ${configuration.publish.port}`,
`CMD ["/app/${custom.name}"]`
].join('\n')
}
const cacheRustDocker = (configuration, custom) => {
return [
`FROM rust:latest AS planner-${configuration.build.container.name}`,
'WORKDIR /app',
'RUN cargo install cargo-chef',
'COPY . .',
'RUN cargo chef prepare --recipe-path recipe.json',
'FROM rust:latest',
'WORKDIR /app',
'RUN cargo install cargo-chef',
`COPY --from=planner-${configuration.build.container.name} /app/recipe.json recipe.json`,
'RUN cargo chef cook --release --recipe-path recipe.json'
].join('\n')
}
module.exports = async function (configuration) {
try {
const cargoToml = await execShellAsync(`cat ${configuration.general.workdir}/Cargo.toml`)
const parsedToml = TOML.parse(cargoToml)
const custom = {
name: parsedToml.package.name
}
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, cacheRustDocker(configuration, custom))
let stream = await docker.engine.buildImage(
{ src: ['.'], context: configuration.general.workdir },
{ t: `${configuration.build.container.name}:cache` }
)
await streamEvents(stream, configuration)
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishRustDocker(configuration, custom))
stream = await docker.engine.buildImage(
{ src: ['.'], context: configuration.general.workdir },
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
)
await streamEvents(stream, configuration)
} catch (error) {
throw { error, type: 'server' }
}
}
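
The Rust buildpack takes the binary name from the repository's Cargo.toml and reuses it both for cargo build --release --bin <name> and for the binary copied into the runtime image. A minimal sketch of that lookup, assuming a workdir path and reading the manifest directly instead of shelling out to cat:

const fs = require('fs').promises
const TOML = require('@iarna/toml')

async function cargoPackageName (workdir) {
  // Cargo.toml is expected to contain a [package] table, e.g.
  //   [package]
  //   name = "my-app"
  const manifest = await fs.readFile(`${workdir}/Cargo.toml`, 'utf8')
  return TOML.parse(manifest).package.name
}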


@@ -2,6 +2,7 @@ const fs = require('fs').promises
 const { buildImage } = require('../helpers')
 const { streamEvents, docker } = require('../../libs/docker')
+// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
 const publishStaticDocker = (configuration) => {
   return [
     'FROM nginx:stable-alpine',
@@ -16,12 +17,16 @@ const publishStaticDocker = (configuration) => {
 }
 module.exports = async function (configuration) {
+  try {
     if (configuration.build.command.build) await buildImage(configuration)
     await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishStaticDocker(configuration))
     const stream = await docker.engine.buildImage(
       { src: ['.'], context: configuration.general.workdir },
       { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
     )
     await streamEvents(stream, configuration)
+  } catch (error) {
+    throw { error, type: 'server' }
+  }
 }


@@ -1,4 +1,4 @@
-const packs = require('../../../packs')
+const packs = require('../../../buildPacks')
 const { saveAppLog } = require('../../logging')
 const Deployment = require('../../../models/Deployment')
@@ -26,9 +26,14 @@ module.exports = async function (configuration) {
       throw { error, type: 'app' }
     }
   } else {
+    try {
       await Deployment.findOneAndUpdate(
         { repoId: id, branch, deployId, organization, name, domain },
         { repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
+    } catch (error) {
+      // Hmm.
+    }
     throw { error: 'No buildpack found.', type: 'app' }
   }
 }


@@ -2,17 +2,16 @@ const { docker } = require('../../docker')
 const { execShellAsync } = require('../../common')
 const Deployment = require('../../../models/Deployment')
-async function purgeOldThings () {
+async function purgeImagesContainers () {
   try {
-    // TODO: Tweak this, because it deletes coolify-base, so the upgrade will be slow
-    await docker.engine.pruneImages()
-    await docker.engine.pruneContainers()
+    await execShellAsync('docker container prune -f')
+    await execShellAsync('docker image prune -f --filter=label!=coolify-reserve=true')
   } catch (error) {
     throw { error, type: 'server' }
   }
 }
-async function cleanup (configuration) {
+async function cleanupStuckedDeploymentsInDB (configuration) {
   const { id } = configuration.repository
   const deployId = configuration.general.deployId
   try {
@@ -39,4 +38,4 @@ async function deleteSameDeployments (configuration) {
   }
 }
-module.exports = { cleanup, deleteSameDeployments, purgeOldThings }
+module.exports = { cleanupStuckedDeploymentsInDB, deleteSameDeployments, purgeImagesContainers }
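
Pruning now goes through the Docker CLI so that images labeled coolify-reserve=true survive; the base images are re-tagged with that label at startup (see api/index.js later in this diff). A minimal sketch of the two halves together, with hypothetical helper names and the execShellAsync helper from libs/common assumed:

const { execShellAsync } = require('../../common')

// Re-tag a base image with the reserve label so the prune filter below skips it.
async function reserveImage (image) {
  await execShellAsync(`echo "FROM ${image}" | docker build --label coolify-reserve=true -t ${image} -`)
}

// Everything without the label is fair game for cleanup.
async function purgeUnreserved () {
  await execShellAsync('docker container prune -f')
  await execShellAsync('docker image prune -f --filter=label!=coolify-reserve=true')
}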


@@ -1,7 +1,7 @@
 const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
 const cuid = require('cuid')
 const crypto = require('crypto')
+const { docker } = require('../docker')
 const { execShellAsync } = require('../common')
 function getUniq () {
@@ -30,7 +30,8 @@ function setDefaultConfiguration (configuration) {
       rollback_config: {
         parallelism: 1,
         delay: '10s',
-        order: 'start-first'
+        order: 'start-first',
+        failure_action: 'rollback'
       }
     }
@@ -48,11 +49,18 @@ function setDefaultConfiguration (configuration) {
       configuration.publish.port = 80
     } else if (configuration.build.pack === 'nodejs') {
       configuration.publish.port = 3000
+    } else if (configuration.build.pack === 'rust') {
+      configuration.publish.port = 3000
     }
   }
   if (!configuration.build.directory) {
     configuration.build.directory = '/'
   }
+  if (!configuration.publish.directory) {
+    configuration.publish.directory = '/'
+  }
   if (configuration.build.pack === 'static' || configuration.build.pack === 'nodejs') {
     if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
   }
@@ -66,8 +74,9 @@ function setDefaultConfiguration (configuration) {
   }
 }
-async function updateServiceLabels (configuration, services) {
+async function updateServiceLabels (configuration) {
   // In case of any failure during deployment, still update the current configuration.
+  const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
   const found = services.find(s => {
     const config = JSON.parse(s.Spec.Labels.configuration)
     if (config.repository.id === configuration.repository.id && config.repository.branch === configuration.repository.branch) {
@@ -79,10 +88,58 @@ async function updateServiceLabels (configuration, services) {
     const { ID } = found
     try {
       const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
-      execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
+      await execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
     } catch (error) {
       console.log(error)
     }
   }
 }
-module.exports = { setDefaultConfiguration, updateServiceLabels }
async function precheckDeployment ({ services, configuration }) {
let foundService = false
let configChanged = false
let imageChanged = false
let forceUpdate = false
for (const service of services) {
const running = JSON.parse(service.Spec.Labels.configuration)
if (running) {
if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
// Base service configuration changed
if (!running.build.container.baseSHA || running.build.container.baseSHA !== configuration.build.container.baseSHA) {
forceUpdate = true
}
// If the deployment is in error state, forceUpdate
const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running' && n.Image.split(':')[1] === running.build.container.tag)
if (isError.length > 0) forceUpdate = true
foundService = true
const runningWithoutContainer = JSON.parse(JSON.stringify(running))
delete runningWithoutContainer.build.container
const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
delete configurationWithoutContainer.build.container
// If only the configuration changed
if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
// If only the image changed
if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
// If build pack changed, forceUpdate the service
if (running.build.pack !== configuration.build.pack) forceUpdate = true
}
}
}
if (forceUpdate) {
imageChanged = false
configChanged = false
}
return {
foundService,
imageChanged,
configChanged,
forceUpdate
}
}
module.exports = { setDefaultConfiguration, updateServiceLabels, precheckDeployment }
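
Both the deploy route and the GitHub webhook (later in this diff) consume precheckDeployment the same way; a condensed sketch of that call site:

const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
const configuration = setDefaultConfiguration(request.body)
await cloneRepository(configuration)
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
if (foundService && !forceUpdate && !imageChanged && !configChanged) {
  // Nothing to redeploy: clean the temporary workdir and bail out early.
  cleanupTmp(configuration.general.workdir)
  reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
  return
}
queueAndBuild(configuration, imageChanged)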


@@ -1,52 +1,63 @@
 const fs = require('fs').promises
 module.exports = async function (configuration) {
   try {
-    // TODO: Do it better.
-    await fs.writeFile(`${configuration.general.workdir}/.dockerignore`, 'node_modules')
+    // TODO: Write full .dockerignore for all deployments!!
+    if (configuration.build.pack === 'php') {
+      await fs.writeFile(`${configuration.general.workdir}/.htaccess`, `
+      RewriteEngine On
+      RewriteBase /
+      RewriteCond %{REQUEST_FILENAME} !-d
+      RewriteCond %{REQUEST_FILENAME} !-f
+      RewriteRule ^(.+)$ index.php [QSA,L]
+      `)
+    }
+    // await fs.writeFile(`${configuration.general.workdir}/.dockerignore`, 'node_modules')
+    if (configuration.build.pack === 'static') {
       await fs.writeFile(
         `${configuration.general.workdir}/nginx.conf`,
         `user nginx;
 worker_processes auto;
 error_log /var/log/nginx/error.log warn;
 pid /var/run/nginx.pid;
 events {
   worker_connections 1024;
 }
 http {
   include /etc/nginx/mime.types;
   access_log off;
   sendfile on;
   #tcp_nopush on;
   keepalive_timeout 65;
   server {
     listen 80;
     server_name localhost;
     location / {
       root /usr/share/nginx/html;
       index index.html;
       try_files $uri $uri/index.html $uri/ /index.html =404;
     }
     error_page 404 /50x.html;
     # redirect server error pages to the static page /50x.html
     #
     error_page 500 502 503 504 /50x.html;
     location = /50x.html {
       root /usr/share/nginx/html;
     }
   }
 }
 `
       )
+    }
   } catch (error) {
     throw { error, type: 'server' }
   }


@@ -5,7 +5,7 @@ const { docker } = require('../../docker')
 const { saveAppLog } = require('../../logging')
 const { deleteSameDeployments } = require('../cleanup')
-module.exports = async function (configuration, configChanged, imageChanged) {
+module.exports = async function (configuration, imageChanged) {
   try {
     const generateEnvs = {}
     for (const secret of configuration.publish.secrets) {
@@ -62,7 +62,6 @@ module.exports = async function (configuration, configChanged, imageChanged) {
     }
     await saveAppLog('### Publishing.', configuration)
     await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
-    // TODO: Compare stack.yml with the currently running one to upgrade if something changes, like restart_policy
     if (imageChanged) {
       // console.log('image changed')
       await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)


@@ -8,10 +8,10 @@ const copyFiles = require('./deploy/copyFiles')
 const buildContainer = require('./build/container')
 const deploy = require('./deploy/deploy')
 const Deployment = require('../../models/Deployment')
-const { cleanup, purgeOldThings } = require('./cleanup')
+const { cleanupStuckedDeploymentsInDB, purgeImagesContainers } = require('./cleanup')
 const { updateServiceLabels } = require('./configuration')
-async function queueAndBuild (configuration, services, configChanged, imageChanged) {
+async function queueAndBuild (configuration, imageChanged) {
   const { id, organization, name, branch } = configuration.repository
   const { domain } = configuration.publish
   const { deployId, nickname, workdir } = configuration.general
@@ -22,15 +22,15 @@ async function queueAndBuild (configuration, services, configChanged, imageChanged) {
     await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
     await copyFiles(configuration)
     await buildContainer(configuration)
-    await deploy(configuration, configChanged, imageChanged)
+    await deploy(configuration, imageChanged)
     await Deployment.findOneAndUpdate(
       { repoId: id, branch, deployId, organization, name, domain },
       { repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
-    await updateServiceLabels(configuration, services)
+    await updateServiceLabels(configuration)
     cleanupTmp(workdir)
-    await purgeOldThings()
+    await purgeImagesContainers()
   } catch (error) {
-    await cleanup(configuration)
+    await cleanupStuckedDeploymentsInDB(configuration)
     cleanupTmp(workdir)
     const { type } = error.error
     if (type === 'app') {


@@ -15,12 +15,16 @@ function delay (t) {
 }
 async function verifyUserId (authorization) {
+  try {
     const token = authorization.split(' ')[1]
     const verify = jsonwebtoken.verify(token, process.env.JWT_SIGN_KEY)
     const found = await User.findOne({ uid: verify.jti })
     if (found) {
       return true
     } else {
       return false
     }
+  } catch (error) {
+    return false
+  }
 }


@@ -40,13 +40,17 @@ async function saveAppLog (event, configuration, isError) {
 }
 async function saveServerLog ({ event, configuration, type }) {
+  try {
     if (configuration) {
       const deployId = configuration.general.deployId
       const repoId = configuration.repository.id
       const branch = configuration.repository.branch
       await new ApplicationLog({ repoId, branch, deployId, event: `[SERVER ERROR 😖]: ${event}` }).save()
     }
     await new ServerLog({ event, type }).save()
+  } catch (error) {
+    // Hmm.
+  }
 }
 module.exports = {


@@ -1,15 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
module.exports = async function (configuration) {
const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
if (fs.stat(`${path}/Dockerfile`)) {
const stream = await docker.engine.buildImage(
{ src: ['.'], context: path },
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
)
await streamEvents(stream, configuration)
} else {
throw { error: 'No custom dockerfile found.', type: 'app' }
}
}


@@ -1,21 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
const publishPHPDocker = (configuration) => {
return [
'FROM php:apache',
'WORKDIR /usr/src/app',
`COPY .${configuration.build.directory} /var/www/html`,
'EXPOSE 80',
' CMD ["apache2-foreground"]'
].join('\n')
}
module.exports = async function (configuration) {
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
const stream = await docker.engine.buildImage(
{ src: ['.'], context: configuration.general.workdir },
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
)
await streamEvents(stream, configuration)
}


@@ -5,31 +5,36 @@ const { docker } = require('../../../libs/docker')
 module.exports = async function (fastify) {
   fastify.post('/', async (request, reply) => {
+    try {
       if (!await verifyUserId(request.headers.authorization)) {
         reply.code(500).send({ error: 'Invalid request' })
         return
       }
       const configuration = setDefaultConfiguration(request.body)
       const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
       let foundDomain = false
       for (const service of services) {
         const running = JSON.parse(service.Spec.Labels.configuration)
         if (running) {
           if (
             running.publish.domain === configuration.publish.domain &&
-            running.repository.id !== configuration.repository.id
+            running.repository.id !== configuration.repository.id &&
+            running.publish.path === configuration.publish.path
           ) {
             foundDomain = true
           }
         }
       }
       if (fastify.config.DOMAIN === configuration.publish.domain) foundDomain = true
       if (foundDomain) {
         reply.code(500).send({ message: 'Domain already in use.' })
         return
       }
       return { message: 'OK' }
+    } catch (error) {
+      throw { error, type: 'server' }
+    }
   })
 }


@@ -1,8 +1,8 @@
-const { verifyUserId, cleanupTmp, execShellAsync } = require('../../../../libs/common')
+const { verifyUserId, cleanupTmp } = require('../../../../libs/common')
 const Deployment = require('../../../../models/Deployment')
 const { queueAndBuild } = require('../../../../libs/applications')
-const { setDefaultConfiguration } = require('../../../../libs/applications/configuration')
+const { setDefaultConfiguration, precheckDeployment } = require('../../../../libs/applications/configuration')
 const { docker } = require('../../../../libs/docker')
 const cloneRepository = require('../../../../libs/applications/github/cloneRepository')
@@ -32,90 +32,43 @@ module.exports = async function (fastify) {
 // },
 // };
 fastify.post('/', async (request, reply) => {
-    if (!await verifyUserId(request.headers.authorization)) {
+    try {
+      await verifyUserId(request.headers.authorization)
+    } catch (error) {
       reply.code(500).send({ error: 'Invalid request' })
       return
     }
+    try {
+      const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
+      const configuration = setDefaultConfiguration(request.body)
+      await cloneRepository(configuration)
+      const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
+      if (foundService && !forceUpdate && !imageChanged && !configChanged) {
+        cleanupTmp(configuration.general.workdir)
+        reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
+        return
+      }
-    const configuration = setDefaultConfiguration(request.body)
-    const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
-    await cloneRepository(configuration)
-    let foundService = false
-    let foundDomain = false
-    let configChanged = false
-    let imageChanged = false
-    let forceUpdate = false
-    for (const service of services) {
-      const running = JSON.parse(service.Spec.Labels.configuration)
-      if (running) {
-        if (
-          running.publish.domain === configuration.publish.domain &&
-          running.repository.id !== configuration.repository.id
-        ) {
-          foundDomain = true
-        }
-        if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
-          // Base service configuration changed
-          if (!running.build.container.baseSHA || running.build.container.baseSHA !== configuration.build.container.baseSHA) {
-            configChanged = true
-          }
-          const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
-          const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running')
-          if (isError.length > 0) forceUpdate = true
-          foundService = true
-          const runningWithoutContainer = JSON.parse(JSON.stringify(running))
-          delete runningWithoutContainer.build.container
-          const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
-          delete configurationWithoutContainer.build.container
-          // If only the configuration changed
-          if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
-          // If only the image changed
-          if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
-          // If build pack changed, forceUpdate the service
-          if (running.build.pack !== configuration.build.pack) forceUpdate = true
-        }
-      }
-    }
-    if (foundDomain) {
-      cleanupTmp(configuration.general.workdir)
-      reply.code(500).send({ message: 'Domain already in use.' })
-      return
-    }
-    if (forceUpdate) {
-      imageChanged = false
-      configChanged = false
-    } else {
-      if (foundService && !imageChanged && !configChanged) {
-        cleanupTmp(configuration.general.workdir)
-        reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
-        return
-      }
-    }
       const alreadyQueued = await Deployment.find({
         repoId: configuration.repository.id,
         branch: configuration.repository.branch,
         organization: configuration.repository.organization,
         name: configuration.repository.name,
         domain: configuration.publish.domain,
         progress: { $in: ['queued', 'inprogress'] }
       })
       if (alreadyQueued.length > 0) {
         reply.code(200).send({ message: 'Already in the queue.' })
         return
       }
-    queueAndBuild(configuration, services, configChanged, imageChanged)
+      queueAndBuild(configuration, imageChanged)
       reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
+    } catch (error) {
+      throw { error, type: 'server' }
+    }
   })
 }


@@ -18,25 +18,29 @@ module.exports = async function (fastify) {
   }
 }
 fastify.get('/', { schema: getLogSchema }, async (request, reply) => {
+    try {
       const { repoId, branch, page } = request.query
       const onePage = 5
       const show = Number(page) * onePage || 5
       const deploy = await Deployment.find({ repoId, branch })
         .select('-_id -__v -repoId')
         .sort({ createdAt: 'desc' })
         .limit(show)
       const finalLogs = deploy.map(d => {
         const finalLogs = { ...d._doc }
         const updatedAt = dayjs(d.updatedAt).utc()
         finalLogs.took = updatedAt.diff(dayjs(d.createdAt)) / 1000
         finalLogs.since = updatedAt.fromNow()
         return finalLogs
       })
       return finalLogs
+    } catch (error) {
+      throw { error, type: 'server' }
+    }
   })
   fastify.get('/:deployId', async (request, reply) => {


@@ -2,9 +2,13 @@ const { docker } = require('../../../libs/docker')
 module.exports = async function (fastify) {
   fastify.get('/', async (request, reply) => {
+    try {
       const { name } = request.query
       const service = await docker.engine.getService(`${name}_${name}`)
       const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true })).toString().split('\n').map(l => l.slice(8)).filter((a) => a)
       return { logs }
+    } catch (error) {
+      throw { error, type: 'server' }
+    }
   })
 }


@@ -1,60 +1,6 @@
 const { docker } = require('../../libs/docker')
 module.exports = async function (fastify) {
// const getConfig = {
// querystring: {
// type: 'object',
// properties: {
// repoId: { type: 'number' },
// branch: { type: 'string' }
// },
// required: ['repoId', 'branch']
// }
// }
// const saveConfig = {
// body: {
// type: 'object',
// properties: {
// build: {
// type: 'object',
// properties: {
// baseDir: { type: 'string' },
// installCmd: { type: 'string' },
// buildCmd: { type: 'string' }
// },
// required: ['baseDir', 'installCmd', 'buildCmd']
// },
// publish: {
// type: 'object',
// properties: {
// publishDir: { type: 'string' },
// domain: { type: 'string' },
// pathPrefix: { type: 'string' },
// port: { type: 'number' }
// },
// required: ['publishDir', 'domain', 'pathPrefix', 'port']
// },
// previewDeploy: { type: 'boolean' },
// branch: { type: 'string' },
// repoId: { type: 'number' },
// buildPack: { type: 'string' },
// fullName: { type: 'string' },
// installationId: { type: 'number' }
// },
// required: ['build', 'publish', 'previewDeploy', 'branch', 'repoId', 'buildPack', 'fullName', 'installationId']
// }
// }
// fastify.get("/all", async (request, reply) => {
// return await Config.find().select("-_id -__v");
// });
// fastify.get("/", { schema: getConfig }, async (request, reply) => {
// const { repoId, branch } = request.query;
// return await Config.findOne({ repoId, branch }).select("-_id -__v");
// });
   fastify.post('/', async (request, reply) => {
     const { name, organization, branch } = request.body
     const services = await docker.engine.listServices()
@@ -79,25 +25,4 @@ module.exports = async function (fastify) {
       reply.code(500).send({ message: 'No configuration found.' })
     }
   })
// fastify.delete("/", async (request, reply) => {
// const { repoId, branch } = request.body;
// const deploys = await Deployment.find({ repoId, branch })
// const found = deploys.filter(d => d.progress !== 'done' && d.progress !== 'failed')
// if (found.length > 0) {
// throw new Error('Deployment inprogress, cannot delete now.');
// }
// const config = await Config.findOneAndDelete({ repoId, branch })
// for (const deploy of deploys) {
// await ApplicationLog.findOneAndRemove({ deployId: deploy.deployId });
// }
// const secrets = await Secret.find({ repoId, branch });
// for (const secret of secrets) {
// await Secret.findByIdAndRemove(secret._id);
// }
// await execShellAsync(`docker stack rm ${config.containerName}`);
// return { message: 'Deleted application and related configurations.' };
// });
 }


@@ -42,7 +42,7 @@ module.exports = async function (fastify) {
       r.Spec.Labels.configuration = configuration
       return r
     })
-    applications = [...new Map(applications.map(item => [item.Spec.Labels.configuration.publish.domain, item])).values()]
+    applications = [...new Map(applications.map(item => [item.Spec.Labels.configuration.publish.domain + item.Spec.Labels.configuration.publish.path, item])).values()]
     return {
       serverLogs,
       applications: {
@@ -55,6 +55,8 @@ module.exports = async function (fastify) {
   } catch (error) {
     if (error.code === 'ENOENT' && error.errno === -2) {
       throw new Error(`Docker service unavailable at ${error.address}.`)
+    } else {
+      throw { error, type: 'server' }
     }
   }
 })


@@ -45,124 +45,128 @@ module.exports = async function (fastify) {
 }
 fastify.post('/deploy', { schema: postSchema }, async (request, reply) => {
+    try {
       let { type, defaultDatabaseName } = request.body
       const passwords = generator.generateMultiple(2, {
         length: 24,
         numbers: true,
         strict: true
       })
       const usernames = generator.generateMultiple(2, {
         length: 10,
         numbers: true,
         strict: true
       })
       // TODO: Query for existing db with the same name
       const nickname = getUniq()
       if (!defaultDatabaseName) defaultDatabaseName = nickname
       reply.code(201).send({ message: 'Deploying.' })
       // TODO: Persistent volume, custom inputs
       const deployId = cuid()
       const configuration = {
         general: {
           workdir: `/tmp/${deployId}`,
           deployId,
           nickname,
           type
         },
         database: {
           usernames,
           passwords,
           defaultDatabaseName
         },
         deploy: {
           name: nickname
         }
       }
       let generateEnvs = {}
       let image = null
       let volume = null
       if (type === 'mongodb') {
         generateEnvs = {
           MONGODB_ROOT_PASSWORD: passwords[0],
           MONGODB_USERNAME: usernames[0],
           MONGODB_PASSWORD: passwords[1],
           MONGODB_DATABASE: defaultDatabaseName
         }
         image = 'bitnami/mongodb:4.4'
         volume = `${configuration.general.deployId}-${type}-data:/bitnami/mongodb`
       } else if (type === 'postgresql') {
         generateEnvs = {
           POSTGRESQL_PASSWORD: passwords[0],
           POSTGRESQL_USERNAME: usernames[0],
           POSTGRESQL_DATABASE: defaultDatabaseName
         }
         image = 'bitnami/postgresql:13.2.0'
         volume = `${configuration.general.deployId}-${type}-data:/bitnami/postgresql`
       } else if (type === 'couchdb') {
         generateEnvs = {
           COUCHDB_PASSWORD: passwords[0],
           COUCHDB_USER: usernames[0]
         }
         image = 'bitnami/couchdb:3'
         volume = `${configuration.general.deployId}-${type}-data:/bitnami/couchdb`
       } else if (type === 'mysql') {
         generateEnvs = {
           MYSQL_ROOT_PASSWORD: passwords[0],
           MYSQL_ROOT_USER: usernames[0],
           MYSQL_USER: usernames[1],
           MYSQL_PASSWORD: passwords[1],
           MYSQL_DATABASE: defaultDatabaseName
         }
         image = 'bitnami/mysql:8.0'
         volume = `${configuration.general.deployId}-${type}-data:/bitnami/mysql/data`
       }
       const stack = {
         version: '3.8',
         services: {
           [configuration.general.deployId]: {
             image,
             networks: [`${docker.network}`],
             environment: generateEnvs,
             volumes: [volume],
             deploy: {
               replicas: 1,
               update_config: {
                 parallelism: 0,
                 delay: '10s',
                 order: 'start-first'
               },
               rollback_config: {
                 parallelism: 0,
                 delay: '10s',
                 order: 'start-first'
               },
               labels: [
                 'managedBy=coolify',
                 'type=database',
                 'configuration=' + JSON.stringify(configuration)
               ]
             }
           }
         },
         networks: {
           [`${docker.network}`]: {
             external: true
           }
         },
         volumes: {
           [`${configuration.general.deployId}-${type}-data`]: {
             external: true
           }
         }
       }
       await execShellAsync(`mkdir -p ${configuration.general.workdir}`)
       await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
       await execShellAsync(
         `cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
       )
+    } catch (error) {
+      throw { error, type: 'server' }
+    }
   })
   fastify.delete('/:dbName', async (request, reply) => {


@@ -0,0 +1,14 @@
const Server = require('../../../models/Logs/Server')
module.exports = async function (fastify) {
fastify.get('/', async (request, reply) => {
try {
const serverLogs = await Server.find().select('-_id -__v')
// TODO: Should do better
return {
serverLogs
}
} catch (error) {
throw { error, type: 'server' }
}
})
}


@@ -25,7 +25,7 @@ module.exports = async function (fastify) {
       settings
     }
   } catch (error) {
-    throw new Error(error)
+    throw { error, type: 'server' }
   }
 })
@@ -38,7 +38,7 @@ module.exports = async function (fastify) {
     ).select('-_id -__v')
     reply.code(201).send({ settings })
   } catch (error) {
-    throw new Error(error)
+    throw { error, type: 'server' }
   }
 })
 }


@@ -3,10 +3,10 @@ const { saveServerLog } = require('../../../libs/logging')
 module.exports = async function (fastify) {
   fastify.get('/', async (request, reply) => {
-    const upgradeP1 = await execShellAsync('bash ./install.sh upgrade-phase-1')
+    const upgradeP1 = await execShellAsync('bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p1.sh)"')
     await saveServerLog({ event: upgradeP1, type: 'UPGRADE-P-1' })
     reply.code(200).send('I\'m trying, okay?')
-    const upgradeP2 = await execShellAsync('bash ./install.sh upgrade-phase-2')
+    const upgradeP2 = await execShellAsync('docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -u root coolify bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p2.sh)"')
     await saveServerLog({ event: upgradeP2, type: 'UPGRADE-P-2' })
   })
 }


@@ -3,14 +3,18 @@ const jwt = require('jsonwebtoken')
 module.exports = async function (fastify) {
   fastify.get('/', async (request, reply) => {
+    try {
       const { authorization } = request.headers
       if (!authorization) {
         reply.code(401).send({})
         return
       }
       const token = authorization.split(' ')[1]
       const verify = jwt.verify(token, fastify.config.JWT_SIGN_KEY)
       const found = await User.findOne({ uid: verify.jti })
       found ? reply.code(200).send({}) : reply.code(401).send({})
+    } catch (error) {
+      reply.code(401).send({})
+    }
   })
 }


@@ -1,8 +1,8 @@
 const crypto = require('crypto')
-const { cleanupTmp, execShellAsync } = require('../../../libs/common')
+const { cleanupTmp } = require('../../../libs/common')
 const Deployment = require('../../../models/Deployment')
 const { queueAndBuild } = require('../../../libs/applications')
-const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
+const { setDefaultConfiguration, precheckDeployment } = require('../../../libs/applications/configuration')
 const { docker } = require('../../../libs/docker')
 const cloneRepository = require('../../../libs/applications/github/cloneRepository')
@@ -45,98 +45,55 @@ module.exports = async function (fastify) {
       reply.code(500).send({ error: 'Not a push event.' })
       return
     }
+    try {
       const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
       let configuration = services.find(r => {
         if (request.body.ref.startsWith('refs')) {
           const branch = request.body.ref.split('/')[2]
           if (
             JSON.parse(r.Spec.Labels.configuration).repository.id === request.body.repository.id &&
             JSON.parse(r.Spec.Labels.configuration).repository.branch === branch
           ) {
             return r
           }
         }
         return null
       })
       if (!configuration) {
         reply.code(500).send({ error: 'No configuration found.' })
         return
       }
       configuration = setDefaultConfiguration(JSON.parse(configuration.Spec.Labels.configuration))
       await cloneRepository(configuration)
-      let foundService = false
-      let foundDomain = false
-      let configChanged = false
-      let imageChanged = false
-      let forceUpdate = false
-      for (const service of services) {
-        const running = JSON.parse(service.Spec.Labels.configuration)
-        if (running) {
-          if (
-            running.publish.domain === configuration.publish.domain &&
-            running.repository.id !== configuration.repository.id &&
-            running.repository.branch !== configuration.repository.branch
-          ) {
-            foundDomain = true
-          }
-          if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
-            const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
-            const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running')
-            if (isError.length > 0) forceUpdate = true
-            foundService = true
-            const runningWithoutContainer = JSON.parse(JSON.stringify(running))
-            delete runningWithoutContainer.build.container
-            const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
-            delete configurationWithoutContainer.build.container
-            if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
-            if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
-          }
-        }
-      }
-      if (foundDomain) {
-        cleanupTmp(configuration.general.workdir)
-        reply.code(500).send({ message: 'Domain already used.' })
-        return
-      }
-      if (forceUpdate) {
-        imageChanged = false
-        configChanged = false
-      } else {
-        if (foundService && !imageChanged && !configChanged) {
-          cleanupTmp(configuration.general.workdir)
-          reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
-          return
-        }
-      }
+      const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
+      if (foundService && !forceUpdate && !imageChanged && !configChanged) {
+        cleanupTmp(configuration.general.workdir)
+        reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
+        return
+      }
       const alreadyQueued = await Deployment.find({
         repoId: configuration.repository.id,
         branch: configuration.repository.branch,
         organization: configuration.repository.organization,
         name: configuration.repository.name,
         domain: configuration.publish.domain,
         progress: { $in: ['queued', 'inprogress'] }
       })
       if (alreadyQueued.length > 0) {
         reply.code(200).send({ message: 'Already in the queue.' })
         return
       }
-      queueAndBuild(configuration, services, configChanged, imageChanged)
-      reply.code(201).send({ message: 'Deployment queued.' })
+      queueAndBuild(configuration, imageChanged)
+      reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
+    } catch (error) {
+      throw { error, type: 'server' }
+    }
   })
 }
} }


@@ -2,6 +2,8 @@ require('dotenv').config()
 const fs = require('fs')
 const util = require('util')
 const { saveServerLog } = require('./libs/logging')
+const { execShellAsync } = require('./libs/common')
+const { purgeImagesContainers, cleanupStuckedDeploymentsInDB } = require('./libs/applications/cleanup')
 const Deployment = require('./models/Deployment')
 const fastify = require('fastify')({
   logger: { level: 'error' }
@@ -10,6 +12,10 @@ const mongoose = require('mongoose')
 const path = require('path')
 const { schema } = require('./schema')
+process.on('unhandledRejection', (reason, p) => {
+  console.log(reason)
+  console.log(p)
+})
 fastify.register(require('fastify-env'), {
   schema,
   dotenv: true
@@ -31,13 +37,16 @@ if (process.env.NODE_ENV === 'production') {
fastify.register(require('./app'), { prefix: '/api/v1' }) fastify.register(require('./app'), { prefix: '/api/v1' })
fastify.setErrorHandler(async (error, request, reply) => { fastify.setErrorHandler(async (error, request, reply) => {
console.log({ error })
if (error.statusCode) {
reply.status(error.statusCode).send({ message: error.message } || { message: 'Something is NOT okay. Are you okay?' })
} else {
reply.status(500).send({ message: error.message } || { message: 'Something is NOT okay. Are you okay?' })
}
await saveServerLog({ event: error })
try {
await saveServerLog({ event: error })
} catch (error) {
//
}
})
if (process.env.NODE_ENV === 'production') {
@@ -83,8 +92,25 @@ mongoose.connection.once('open', async function () {
console.log('Coolify API is up and running in development.')
}
// On start cleanup inprogress/queued deployments.
const deployments = await Deployment.find({ progress: { $in: ['queued', 'inprogress'] } })
for (const deployment of deployments) {
await Deployment.findByIdAndUpdate(deployment._id, { $set: { progress: 'failed' } })
try {
await cleanupStuckedDeploymentsInDB()
} catch (error) {
// Could not cleanup DB 🤔
}
try {
// Doing this because I do not want to prune these images. Prune skips coolify-reserve labeled images.
const basicImages = ['nginx:stable-alpine', 'node:lts', 'ubuntu:20.04']
for (const image of basicImages) {
await execShellAsync(`echo "FROM ${image}" | docker build --label coolify-reserve=true -t ${image} -`)
}
} catch (error) {
console.log('Could not pull some basic images from Docker Hub.')
console.log(error)
}
try {
await purgeImagesContainers()
} catch (error) {
console.log('Could not purge containers/images.')
console.log(error)
}
})
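A note on the startup block above: each base image is rebuilt locally with a coolify-reserve=true label so the later cleanup pass can tell it apart from build leftovers. purgeImagesContainers itself is not part of this diff, so how it filters is not shown; a minimal sketch of the idea, assuming the prune step excludes the label with Docker's label!= filter, could look like this:

// Sketch only: protect a base image with the reserve label, then prune
// everything unused that does not carry that label. The prune flags below
// are an assumption, not the actual purgeImagesContainers implementation.
const { execShellAsync } = require('./libs/common')
async function protectAndPrune (image) {
  // Re-tag the base image with the reserve label so prune will not touch it.
  await execShellAsync(`echo "FROM ${image}" | docker build --label coolify-reserve=true -t ${image} -`)
  // Remove every unused image that does not carry the reserve label.
  await execShellAsync('docker image prune -af --filter "label!=coolify-reserve=true"')
}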

24
install/Dockerfile-new Normal file
View File

@@ -0,0 +1,24 @@
FROM ubuntu:20.04 as binaries
LABEL coolify-preserve=true
RUN apt update && apt install -y curl gnupg2 ca-certificates
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
RUN chmod +x /usr/bin/envsubst /usr/bin/jq
RUN apt update && apt install -y docker-ce-cli && apt clean all
FROM node:lts
WORKDIR /usr/src/app
LABEL coolify-preserve=true
COPY --from=binaries /usr/bin/docker /usr/bin/docker
COPY --from=binaries /usr/bin/envsubst /usr/bin/envsubst
COPY --from=binaries /usr/bin/jq /usr/bin/jq
COPY . .
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm@6
RUN pnpm install
RUN pnpm build
RUN rm -fr node_modules .pnpm-store
RUN pnpm install -P
CMD ["pnpm", "start"]
EXPOSE 3000

10
install/README.md Normal file
View File

@@ -0,0 +1,10 @@
Some of the files are here for backwards compatibility.
I will do the following after 2 months:
- rm ./install.js and ./update.js
- rm ../install.sh
- rm ./Dockerfile-base
- rm ./obs
- rm ./check.js "No need to check env file. During installation, it is checked by the installer. If you change it between two upgrades: 🤷‍♂️"
- Rename Dockerfile-new to Dockerfile

24
install/check.js Normal file
View File

@@ -0,0 +1,24 @@
require('dotenv').config()
const fastify = require('fastify')()
const { schema } = require('../api/schema')
checkConfig().then(() => {
console.log('Config: OK')
}).catch((err) => {
console.log('Config: NOT OK')
console.error(err)
process.exit(1)
})
function checkConfig () {
return new Promise((resolve, reject) => {
fastify.register(require('fastify-env'), {
schema,
dotenv: true
})
.ready((err) => {
if (err) reject(err)
resolve()
})
})
}

View File

@@ -22,6 +22,7 @@ services:
- --providers.docker.swarmMode=true
- --providers.docker.exposedbydefault=false
- --providers.docker.network=${DOCKER_NETWORK}
- --providers.docker.swarmModeRefreshSeconds=1s
- --entrypoints.web.address=:80
- --entrypoints.websecure.address=:443
- --certificatesresolvers.letsencrypt.acme.httpchallenge=true

View File

@@ -0,0 +1,4 @@
FROM coolify-base-nodejs
WORKDIR /usr/src/app
COPY . .
RUN pnpm install

View File

@@ -0,0 +1,6 @@
FROM node:lts
LABEL coolify-preserve=true
COPY --from=coolify-binaries /usr/bin/docker /usr/bin/docker
COPY --from=coolify-binaries /usr/bin/envsubst /usr/bin/envsubst
COPY --from=coolify-binaries /usr/bin/jq /usr/bin/jq
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm@6

View File

@@ -0,0 +1,9 @@
FROM ubuntu:20.04
LABEL coolify-preserve=true
RUN apt update && apt install -y curl gnupg2 ca-certificates
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
RUN chmod +x /usr/bin/envsubst /usr/bin/jq
RUN apt update && apt install -y docker-ce-cli && apt clean all

View File

@@ -2,7 +2,6 @@ require('dotenv').config()
const { program } = require('commander')
const shell = require('shelljs')
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')
program.version('0.0.1')
program
.option('-d, --debug', 'Debug outputs.')

View File

@@ -1,7 +1,7 @@
{
"name": "coolify",
"description": "An open-source, hassle-free, self-hostable Heroku & Netlify alternative.",
"version": "1.0.5",
"version": "1.0.6",
"license": "AGPL-3.0",
"scripts": {
"lint": "standard",
@@ -16,8 +16,9 @@
"build:svite": "svite build" "build:svite": "svite build"
}, },
"dependencies": { "dependencies": {
"@iarna/toml": "^2.2.5",
"@roxi/routify": "^2.15.1", "@roxi/routify": "^2.15.1",
"@zerodevx/svelte-toast": "^0.2.0", "@zerodevx/svelte-toast": "^0.2.1",
"axios": "^0.21.1", "axios": "^0.21.1",
"commander": "^7.2.0", "commander": "^7.2.0",
"compare-versions": "^3.6.0", "compare-versions": "^3.6.0",
@@ -26,7 +27,7 @@
"deepmerge": "^4.2.2", "deepmerge": "^4.2.2",
"dockerode": "^3.2.1", "dockerode": "^3.2.1",
"dotenv": "^8.2.0", "dotenv": "^8.2.0",
"fastify": "^3.14.1", "fastify": "^3.14.2",
"fastify-env": "^2.1.0", "fastify-env": "^2.1.0",
"fastify-jwt": "^2.4.0", "fastify-jwt": "^2.4.0",
"fastify-plugin": "^3.0.0", "fastify-plugin": "^3.0.0",
@@ -52,7 +53,7 @@
"standard": "^16.0.3", "standard": "^16.0.3",
"svelte": "^3.37.0", "svelte": "^3.37.0",
"svelte-hmr": "^0.14.0", "svelte-hmr": "^0.14.0",
"svelte-preprocess": "^4.6.1", "svelte-preprocess": "^4.7.0",
"svite": "0.8.1", "svite": "0.8.1",
"tailwindcss": "2.1.1" "tailwindcss": "2.1.1"
}, },

82
pnpm-lock.yaml generated
View File

@@ -1,17 +1,18 @@
lockfileVersion: 5.3
specifiers:
'@iarna/toml': ^2.2.5
'@roxi/routify': ^2.15.1
'@zerodevx/svelte-toast': ^0.2.0
'@zerodevx/svelte-toast': ^0.2.1
axios: ^0.21.1
commander: ^6.2.1
commander: ^7.2.0
compare-versions: ^3.6.0
cuid: ^2.1.8
dayjs: ^1.10.4
deepmerge: ^4.2.2
dockerode: ^3.2.1
dotenv: ^8.2.0
fastify: ^3.14.1
fastify: ^3.14.2
fastify-env: ^2.1.0
fastify-jwt: ^2.4.0
fastify-plugin: ^3.0.0
@@ -33,24 +34,25 @@ specifiers:
standard: ^16.0.3
svelte: ^3.37.0
svelte-hmr: ^0.14.0
svelte-preprocess: ^4.6.1
svelte-preprocess: ^4.7.0
svelte-select: ^3.17.0
svite: 0.8.1
tailwindcss: 2.1.1
unique-names-generator: ^4.4.0
dependencies:
'@iarna/toml': 2.2.5
'@roxi/routify': 2.15.1
'@zerodevx/svelte-toast': 0.2.0
'@zerodevx/svelte-toast': 0.2.1
axios: 0.21.1
commander: 6.2.1
commander: 7.2.0
compare-versions: 3.6.0
cuid: 2.1.8
dayjs: 1.10.4
deepmerge: 4.2.2
dockerode: 3.2.1
dotenv: 8.2.0
fastify: 3.14.1
fastify: 3.14.2
fastify-env: 2.1.0
fastify-jwt: 2.4.0
fastify-plugin: 3.0.0
@@ -76,7 +78,7 @@ devDependencies:
standard: 16.0.3
svelte: 3.37.0
svelte-hmr: 0.14.0_svelte@3.37.0
svelte-preprocess: 4.6.9_fa8d64e4f515eee295d8f0f45fceadd2
svelte-preprocess: 4.7.0_fa8d64e4f515eee295d8f0f45fceadd2
svite: 0.8.1_d334b093211aa94b4e678204453b11ae
tailwindcss: 2.1.1_postcss@8.2.9
@@ -155,6 +157,10 @@ packages:
purgecss: 3.1.3
dev: true
/@iarna/toml/2.2.5:
resolution: {integrity: sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==}
dev: false
/@koa/cors/3.1.0:
resolution: {integrity: sha512-7ulRC1da/rBa6kj6P4g2aJfnET3z8Uf3SWu60cjbtxTA5g8lxRdX/Bd2P92EagGwwAhANeNw8T8if99rJliR6Q==}
engines: {node: '>= 8.0.0'}
@@ -553,8 +559,8 @@ packages:
resolution: {integrity: sha512-dn5FyfSc4ky424jH4FntiHno7Ss5yLkqKNmM/NXwANRnlkmqu74pnGetexDFVG5phMk9/FhwovUZCWGxsotVKg==}
dev: true
/@zerodevx/svelte-toast/0.2.0:
/@zerodevx/svelte-toast/0.2.1:
resolution: {integrity: sha512-zfnu02ZwAxpXfiqvAIZY97+Bv2hsBJ2fJlK/CVxliVu/+1I/R/z5Deo2BUtaLWmKAZX29FtFN9IBjF9hmPHQTA==}
resolution: {integrity: sha512-3yOusE+/xDaVNxkBJwbxDZea5ePQ77B15tbHv6ZlSYtlJu0u0PDhGMu8eoI+SmcCt4j+2sf0A1uS9+LcBIqUgg==}
dev: false
/abab/2.0.5:
@@ -1167,6 +1173,7 @@ packages:
/commander/6.2.1:
resolution: {integrity: sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==}
engines: {node: '>= 6'}
dev: true
/commander/7.2.0:
resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==}
@@ -2163,8 +2170,8 @@ packages:
resolution: {integrity: sha512-s1EQguBw/9qtc1p/WTY4eq9WMRIACkj+HTcOIK1in4MV5aFaQC9ZCIt0dJ7pr5bIf4lPpHvAtP2ywpTNgs7hqw==}
dev: false
/fastify/3.14.1:
/fastify/3.14.2:
resolution: {integrity: sha512-9hoK1vvopsUJnUJpge90t8PZIqNQhGM54yDrd2veCZLkxh8eipnaHrXe2+f7tIt6UScUZ92JZQavxFGB4HX7xA==}
resolution: {integrity: sha512-/PY//7gJnGxLQORaRHCEW148vpFKFpBIQNz1Yo/DxbHuk5EQqK2comzyE2ug8FSEldDX8nleapTshl0m78Px2w==}
engines: {node: '>=10.16.0'}
dependencies:
'@fastify/proxy-addr': 3.0.0
@@ -5770,6 +5777,57 @@ packages:
strip-indent: 3.0.0
svelte: 3.37.0
dev: true
optional: true
/svelte-preprocess/4.7.0_fa8d64e4f515eee295d8f0f45fceadd2:
resolution: {integrity: sha512-iNrY4YGqi0LD2e6oT9YbdSzOKntxk8gmzfqso1z/lUJOZh4o6fyIqkirmiZ8/dDJFqtIE1spVgDFWgkfhLEYlw==}
engines: {node: '>= 9.11.2'}
requiresBuild: true
peerDependencies:
'@babel/core': ^7.10.2
coffeescript: ^2.5.1
less: ^3.11.3
node-sass: '*'
postcss: ^7 || ^8
postcss-load-config: ^2.1.0 || ^3.0.0
pug: ^3.0.0
sass: ^1.26.8
stylus: ^0.54.7
sugarss: ^2.0.0
svelte: ^3.23.0
typescript: ^3.9.5 || ^4.0.0
peerDependenciesMeta:
'@babel/core':
optional: true
coffeescript:
optional: true
less:
optional: true
node-sass:
optional: true
postcss:
optional: true
postcss-load-config:
optional: true
pug:
optional: true
sass:
optional: true
stylus:
optional: true
sugarss:
optional: true
typescript:
optional: true
dependencies:
'@types/pug': 2.0.4
'@types/sass': 1.16.0
detect-indent: 6.0.0
postcss: 8.2.9
postcss-load-config: 3.0.1
strip-indent: 3.0.0
svelte: 3.37.0
dev: true
/svelte-select/3.17.0:
resolution: {integrity: sha512-ITmX/XUiSdkaILmsTviKRkZPaXckM5/FA7Y8BhiUPoamaZG/ZDyOo6ydjFu9fDVFTbwoAUGUi6HBjs+ZdK2AwA==}

View File

@@ -3,8 +3,7 @@
import { Router } from "@roxi/routify";
import { routes } from "../.routify/routes";
const options = {
duration: 5000,
dismissable: true
duration: 2000
};
</script>

View File

@@ -1,6 +1,7 @@
<script>
import { application} from "@store";
import TooltipInfo from "../../../Tooltip/TooltipInfo.svelte";
const showPorts = ['nodejs','custom','rust']
</script>
<div>
@@ -26,6 +27,11 @@
size="large"
label="Published as a PHP application."
/>
{:else if $application.build.pack === 'rust'}
<TooltipInfo
size="large"
label="Published as a Rust application."
/>
{/if}
</label
@@ -35,6 +41,7 @@
<option class="font-bold">nodejs</option>
<option class="font-bold">php</option>
<option class="font-bold">custom</option>
<option class="font-bold">rust</option>
</select>
</div>
<div
@@ -66,7 +73,7 @@
/>
</div>
</div>
{#if $application.build.pack === "nodejs" || $application.build.pack === "custom"}
{#if showPorts.includes($application.build.pack)}
<label for="Port" >Port</label>
<input
id="Port"

View File

@@ -29,7 +29,7 @@
async function loadBranches() {
loading.branches = true;
if ($isActive("/application/new")) $application.repository.branch = null
if ($isActive("/application/new")) $application.repository.branch = null;
const selectedRepository = repositories.find(
r => r.id === $application.repository.id,
);
@@ -54,6 +54,7 @@
}
async function loadGithub() {
loading.github = true;
try {
const { installations } = await $fetch(
"https://api.github.com/user/installations",
@@ -100,7 +101,6 @@
} finally {
loading.github = false;
}
}
function modifyGithubAppConfig() {
const left = screen.width / 2 - 1020 / 2;
@@ -144,6 +144,52 @@
}
</script>
{#if !$isActive("/application/new")}
<div class="min-h-full text-white">
<div
class="py-5 text-left px-6 text-3xl tracking-tight font-bold flex items-center"
>
<a
target="_blank"
class="text-green-500 hover:underline cursor-pointer px-2"
href="{'https://' +
$application.publish.domain +
$application.publish.path}"
>{$application.publish.domain
? `${$application.publish.domain}${$application.publish.path !== '/' ? $application.publish.path : ''}`
: "Loading..."}</a
>
<a
target="_blank"
class="icon"
href="{`https://github.com/${$application.repository.organization}/${$application.repository.name}`}"
>
<svg
class="w-6"
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
><path
d="M9 19c-5 1.5-5-2.5-7-3m14 6v-3.87a3.37 3.37 0 0 0-.94-2.61c3.14-.35 6.44-1.54 6.44-7A5.44 5.44 0 0 0 20 4.77 5.07 5.07 0 0 0 19.91 1S18.73.65 16 2.48a13.38 13.38 0 0 0-7 0C6.27.65 5.09 1 5.09 1A5.07 5.07 0 0 0 5 4.77a5.44 5.44 0 0 0-1.5 3.78c0 5.42 3.3 6.61 6.44 7A3.37 3.37 0 0 0 9 18.13V22"
></path></svg
></a
>
</div>
</div>
{:else if $isActive("/application/new")}
<div class="min-h-full text-white">
<div
class="py-5 text-left px-6 text-3xl tracking-tight font-bold flex items-center"
>
New Application
</div>
</div>
{/if}
<div in:fade="{{ duration: 100 }}">
{#if !$session.githubAppToken}
<Login />

View File

@@ -8,7 +8,7 @@
import BuildStep from "./ActiveTab/BuildStep.svelte";
import Secrets from "./ActiveTab/Secrets.svelte";
import Loading from "../../Loading.svelte";
const buildPhaseActive = ["nodejs", "static"];
let loading = false;
onMount(async () => {
if (!$isActive("/application/new")) {
@@ -27,8 +27,8 @@
});
} else {
loading = true;
$deployments?.applications?.deployed.filter(d => {
$deployments?.applications?.deployed.find(d => {
const conf = d?.Spec?.Labels.application;
const conf = d?.Spec?.Labels.configuration;
if (
conf?.repository?.organization ===
$application.repository.organization &&
@@ -40,6 +40,7 @@
organization: $application.repository.organization,
branch: $application.repository.branch,
});
toast.push("This repository & branch is already defined. Redirecting...");
}
});
try {
@@ -52,6 +53,9 @@
const Dockerfile = dir.find(
f => f.type === "file" && f.name === "Dockerfile",
);
const CargoToml = dir.find(
f => f.type === "file" && f.name === "Cargo.toml",
);
if (Dockerfile) {
$application.build.pack = "custom";
@@ -60,7 +64,7 @@
const { content } = await $fetch(packageJson.git_url);
const packageJsonContent = JSON.parse(atob(content));
const checkPackageJSONContents = dep => {
return(
return (
packageJsonContent?.dependencies?.hasOwnProperty(dep) ||
packageJsonContent?.devDependencies?.hasOwnProperty(dep)
);
@@ -87,13 +91,12 @@
) {
$application.build.command.build = config.build;
}
toast.push(
`${config.name} App detected. Default values set.`,
);
toast.push(`${config.name} App detected. Default values set.`);
}
});
} else if (CargoToml) {
$application.build.pack = "rust";
toast.push(`Rust language detected. Default values set.`);
}
} catch (error) {
// Nothing detected
@@ -133,7 +136,7 @@
>
General
</div>
{#if $application.build.pack === "php"}
{#if !buildPhaseActive.includes($application.build.pack)}
<div disabled class="px-3 py-2 text-warmGray-700 cursor-not-allowed">
Build Step
</div>
@@ -146,14 +149,19 @@
Build Step
</div>
{/if}
<div
on:click="{() => activateTab('secrets')}"
class:text-green-500="{activeTab.secrets}"
class="px-3 py-2 cursor-pointer hover:text-green-500"
>
Secrets
</div>
{#if $application.build.pack === "custom"}
<div disabled class="px-3 py-2 text-warmGray-700 cursor-not-allowed">
Secrets
</div>
{:else}
<div
on:click="{() => activateTab('secrets')}"
class:text-green-500="{activeTab.secrets}"
class="px-3 py-2 cursor-pointer hover:text-green-500"
>
Secrets
</div>
{/if}
</nav>
</div>
<div class="max-w-4xl mx-auto">

View File

@@ -64,18 +64,21 @@
}
}
async function checkUpgrade() {
latest = await window
.fetch(`https://get.coollabs.io/version.json`, {
cache: "no-cache",
})
.then(r => r.json());
const branch =
process.env.NODE_ENV === "production" &&
window.location.hostname !== "test.andrasbacsai.dev"
? "main"
: "next";
latest = await window
.fetch(
`https://raw.githubusercontent.com/coollabsio/coolify/${branch}/package.json`,
{ cache: "no-cache" },
)
.then(r => r.json());
return compareVersions(latest.version, packageJson.version) === 1
return compareVersions(
latest.coolify[branch].version,
packageJson.version,
) === 1
? true
: false;
}
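For context on the change above: the dashboard now downloads a hosted version.json once and compares the advertised version for the current branch against the local package.json using compare-versions, which returns 1 when its first argument is the newer version. A small sketch of that comparison (the version.json shape is inferred from the code above, so treat it as an assumption):

// Sketch: assuming version.json resolves to
// { coolify: { main: { version: "1.0.6" }, next: { version: "1.0.7" } } }
import compareVersions from "compare-versions";
const latest = { coolify: { main: { version: "1.0.6" } } };
const packageJson = { version: "1.0.5" };
// compareVersions("1.0.6", "1.0.5") === 1, so an upgrade would be offered.
const upgradeAvailable = compareVersions(latest.coolify.main.version, packageJson.version) === 1;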

View File

@@ -1,38 +1,5 @@
<script>
import { application } from "@store";
import Configuration from "../../../../../components/Application/Configuration/Configuration.svelte";
</script>
<div class="min-h-full text-white">
<div
class="py-5 text-left px-6 text-3xl tracking-tight font-bold flex items-center"
>
<a
target="_blank"
class="text-green-500 hover:underline cursor-pointer px-2"
href="{'https://' +
$application.publish.domain +
$application.publish.path}">{$application.publish.domain}</a
>
<a
target="_blank"
class="icon"
href="{`https://github.com/${$application.repository.organization}/${$application.repository.name}`}"
>
<svg
class="w-6"
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
><path
d="M9 19c-5 1.5-5-2.5-7-3m14 6v-3.87a3.37 3.37 0 0 0-.94-2.61c3.14-.35 6.44-1.54 6.44-7A5.44 5.44 0 0 0 20 4.77 5.07 5.07 0 0 0 19.91 1S18.73.65 16 2.48a13.38 13.38 0 0 0-7 0C6.27.65 5.09 1 5.09 1A5.07 5.07 0 0 0 5 4.77a5.44 5.44 0 0 0-1.5 3.78c0 5.42 3.3 6.61 6.44 7A3.37 3.37 0 0 0 9 18.13V22"
></path></svg
></a
>
</div>
</div>
<Configuration />

View File

@@ -38,12 +38,12 @@
<Loading />
{:then}
<div
class="text-center space-y-2 max-w-7xl mx-auto px-6"
class="text-center px-6"
in:fade="{{ duration: 100 }}"
>
<div class="max-w-4xl mx-auto" in:fade="{{ duration: 100 }}">
<div in:fade="{{ duration: 100 }}">
<pre
class="text-left font-mono text-xs font-medium tracking-tighter rounded-lg bg-warmGray-800 p-4 whitespace-pre-wrap">
class="leading-4 text-left text-sm font-semibold tracking-tighter rounded-lg bg-black p-6 whitespace-pre-wrap">
{#if logs.length > 0}
{#each logs as log}
{log + '\n'}

View File

@@ -59,17 +59,17 @@
<Loading />
{:then}
<div
class="text-center space-y-2 max-w-7xl mx-auto px-6"
class="text-center px-6"
in:fade="{{ duration: 100 }}"
>
<div class="flex pt-2 space-x-4 w-full">
<div class="w-full">
<div class="font-bold text-left pb-2 text-xl">Application logs</div>
{#if logs.length === 0}
<div class="text-xs">Waiting for the logs...</div>
<div class="text-xs font-semibold tracking-tighter">Waiting for the logs...</div>
{:else}
<pre
class="text-left font-mono text-xs font-medium rounded bg-warmGray-800 text-white p-4 whitespace-pre-wrap w-full">
class="leading-4 text-left text-sm font-semibold tracking-tighter rounded-lg bg-black p-6 whitespace-pre-wrap w-full">
{#each logs as log}
{log + '\n'}
{/each}

View File

@@ -2,12 +2,4 @@
import Configuration from "../../components/Application/Configuration/Configuration.svelte";
</script>
<div class="min-h-full text-white">
<div
class="py-5 text-left px-6 text-3xl tracking-tight font-bold flex items-center"
>
New Application
</div>
</div>
<Configuration />

File diff suppressed because one or more lines are too long

View File

@@ -16,7 +16,7 @@ module.exports = {
],
preserveHtmlElements: true,
options: {
safelist: [/svelte-/, 'border-green-500', 'border-yellow-300', 'border-red-500'],
safelist: [/svelte-/, 'border-green-500', 'border-yellow-300', 'border-red-500', 'hover:border-green-500', 'hover:border-red-200', 'hover:bg-red-200'],
defaultExtractor: (content) => {
// WARNING: tailwindExtractor is internal tailwind api
// if this breaks after a tailwind update, report to svite repo

View File

@@ -26,7 +26,8 @@ module.exports = {
'@zerodevx/svelte-toast',
'mongodb-memory-server-core',
'unique-names-generator',
'generate-password'
'generate-password',
'@iarna/toml'
]
},
proxy: {