Dominic Grimm 2023-05-21 18:58:10 +02:00
commit e82f35da2a
No known key found for this signature in database
GPG key ID: B6FFE500AAD54A3A
78 changed files with 10821 additions and 0 deletions

backend/.cargo/config.toml Normal file
@@ -0,0 +1,3 @@
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=/usr/bin/mold"]
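# clang drives the link and hands off to mold (installed to /usr/bin in backend/Dockerfile) for faster builds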

10
backend/.dockerignore Normal file
@@ -0,0 +1,10 @@
/docs/
/lib/
/bin/
/.shards/
*.dwarf
*.env
/examples/
Dockerfile
.dockerignore
README.md

17
backend/.editorconfig Normal file
@@ -0,0 +1,17 @@
root = true
[*.rs]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
[*.sql]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 4
trim_trailing_whitespace = true

1
backend/.gitignore vendored Normal file
@@ -0,0 +1 @@
/target

6
backend/.sqlfluff Normal file
@@ -0,0 +1,6 @@
[sqlfluff]
dialect = postgres
exclude_rules = LT05
[sqlfluff:indentation]
tab_space_size = 4

3877
backend/Cargo.lock generated Normal file

File diff suppressed because it is too large

54
backend/Cargo.toml Normal file
@@ -0,0 +1,54 @@
[package]
name = "gitea_pages"
version = "0.1.0"
edition = "2021"
[profile.release]
opt-level = 3
lto = true
codegen-units = 1
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
actix-cors = "0.6.4"
actix-web = "4.3.1"
anyhow = { version = "1.0.71", features = ["backtrace"] }
askama = "0.12.0"
async-trait = "0.1.68"
async_once = "0.2.6"
bb8 = "0.8.0"
celery = "0.5.3"
chrono = "0.4.24"
clap = { version = "4.2.7", features = ["derive"] }
dataloader = "0.16.0"
debug-ignore = "1.0.5"
diesel = { version = "2.0.4", features = [
"postgres",
"chrono",
"r2d2",
"uuid",
] }
env_logger = "0.10.0"
envconfig = "0.10.0"
git2 = "0.17.1"
gritea = "0.1.8"
hex-simd = "0.8.0"
http = "0.2.9"
http-auth-basic = "0.3.3"
juniper = { version = "0.15.11", features = ["uuid"] }
juniper_actix = "0.4.0"
lazy_static = "1.4.0"
log = "0.4.17"
ring = "0.16.20"
serde = { version = "1.0.160", features = ["derive"] }
serde_json = "1.0.96"
stdext = "0.3.1"
tokio = { version = "1.28.0", features = ["full"] }
url = "2.3.1"
urlencoding = "2.1.2"
uuid-simd = "0.8.0"
uuidv7 = { version = "1.3.2", package = "uuid", features = ["serde"] }
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.5"

81
backend/Dockerfile Normal file
@@ -0,0 +1,81 @@
# FROM docker.io/lukemathwalker/cargo-chef:latest-rust-1.69.0 as mold
# SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# WORKDIR /tmp
# ARG MOLD_VERSION="1.11.0"
# RUN wget -qO- https://github.com/rui314/mold/archive/refs/tags/v${MOLD_VERSION}.tar.gz | tar zxf -
# WORKDIR /tmp/mold-${MOLD_VERSION}/build
# RUN ../install-build-deps.sh \
# && cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=c++ .. \
# && cmake --build . -j "$(nproc)" \
# && cmake --install .
FROM docker.io/lukemathwalker/cargo-chef:latest-rust-1.69.0 as chef
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# hadolint ignore=DL3009
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
lsb-release=11.1.0 \
wget=1.21-1+deb11u1 \
software-properties-common=0.96.20.2-2.1 \
gnupg=2.2.27-2+deb11u2 \
clang=1:11.0-51+nmu5
WORKDIR /tmp
ARG MOLD_VERSION="1.11.0"
RUN wget -qO- https://github.com/rui314/mold/releases/download/v${MOLD_VERSION}/mold-${MOLD_VERSION}-x86_64-linux.tar.gz | tar xzf - \
&& cp -RT ./mold-${MOLD_VERSION}-x86_64-linux /usr \
&& rm -rf ./mold-${MOLD_VERSION}-x86_64-linux
WORKDIR /
FROM chef as diesel
RUN cargo install diesel_cli --version 2.0.1 --no-default-features --features postgres
FROM chef as planner
WORKDIR /usr/src/gitea_pages
RUN mkdir -p ./src/bin && touch ./src/main.rs
COPY ./Cargo.toml ./Cargo.lock ./
RUN cargo chef prepare --recipe-path recipe.json
FROM chef as builder
WORKDIR /usr/src/gitea_pages
COPY ./.cargo ./.cargo
COPY --from=planner /usr/src/gitea_pages/recipe.json .
RUN cargo chef cook --release --recipe-path recipe.json
COPY --from=planner /usr/src/gitea_pages/Cargo.toml /usr/src/gitea_pages/Cargo.lock ./
# RUN cargo build --release --frozen --offline
COPY ./assets ./assets
COPY ./templates ./templates
COPY ./src ./src
RUN cargo build --release --frozen --offline
FROM docker.io/debian:bullseye-slim as runner
LABEL maintainer="Dominic Grimm <dominic@dergrimm.net>" \
org.opencontainers.image.description="Gitea Pages" \
org.opencontainers.image.licenses="GPLv3" \
org.opencontainers.image.source="https://git.dergrimm.net/dergrimm/gitea_pages" \
org.opencontainers.image.url="https://git.dergrimm.net/dergrimm/gitea_pages"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN apt-get update && \
apt-get install --no-install-recommends -y \
libpq5=13.10-0+deb11u1 \
git=1:2.30.2-1+deb11u2 \
netcat=1.10-46 \
ca-certificates=20210119 \
wget=1.21-1+deb11u1 && \
wget -qO- https://get.docker.com/ | sh && \
apt-get clean && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/* && \
rm -rf /var/lib/apt/ && \
rm -rf /var/lib/dpkg/ && \
rm -rf /var/lib/cache/ && \
rm -rf /var/lib/log/
WORKDIR /usr/local/bin
COPY --from=diesel /usr/local/cargo/bin/diesel .
WORKDIR /opt/gitea_pages
RUN wget -q --show-progress https://raw.githubusercontent.com/vishnubob/wait-for-it/81b1373f17855a4dc21156cfe1694c31d7d1792e/wait-for-it.sh && \
chmod +x wait-for-it.sh
COPY ./run.sh ./migrate.sh ./
RUN chmod +x ./run.sh ./migrate.sh
COPY ./migrations ./migrations
COPY --from=builder /usr/src/gitea_pages/target/release/gitea_pages ./bin/gitea_pages
EXPOSE 8080 8081

8
backend/assets/logo.txt Normal file
@@ -0,0 +1,8 @@
_ _
(_) |
__ _ _| |_ ___ __ _ _ __ __ _ __ _ ___ ___
/ _` | | __/ _ \/ _` | | '_ \ / _` |/ _` |/ _ \/ __|
| (_| | | || __/ (_| | | |_) | (_| | (_| | __/\__ \
\__, |_|\__\___|\__,_| | .__/ \__,_|\__, |\___||___/
__/ | | | __/ |
|___/ |_| |___/

10
backend/migrate.sh Normal file
@@ -0,0 +1,10 @@
#!/usr/bin/env sh
# -*- coding: utf-8 -*-
set -e
DATABASE_URL="$PAGES_DB_URL" diesel migration run \
--migration-dir ./migrations \
--locked-schema
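# Keep a listener on port 8881 afterwards so run.sh's wait-for-it probe can tell that migrations are done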
while true; do nc -lv 8881; done

@@ -0,0 +1,3 @@
DROP TABLE repositories;
DROP TABLE users;

@@ -0,0 +1,17 @@
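-- gen_random_uuid() is built into PostgreSQL 13+; creating pgcrypto keeps the defaults working on older servers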
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE TABLE users (
id uuid PRIMARY KEY DEFAULT GEN_RANDOM_UUID(),
name text NOT NULL UNIQUE,
created_at timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at timestamptz
);
CREATE TABLE repositories (
id uuid PRIMARY KEY DEFAULT GEN_RANDOM_UUID(),
user_id uuid NOT NULL REFERENCES users (id) ON DELETE CASCADE,
name text NOT NULL,
created_at timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at timestamptz,
UNIQUE (user_id, name)
);

8
backend/run.sh Normal file
@@ -0,0 +1,8 @@
#!/usr/bin/env sh
# -*- coding: utf-8 -*-
set -e
./wait-for-it.sh migration_runner:8881 --strict -- echo "Migrations were applied"
RUST_BACKTRACE=1 RUST_LOG=info ./bin/gitea_pages "$1"

backend/src/api/context.rs Normal file
@@ -0,0 +1,57 @@
use dataloader::non_cached::Loader;
use juniper::FieldResult;
use juniper::IntoFieldError;
use uuid_simd::UuidExt;
use uuidv7::Uuid;
use crate::{
api::{loaders, Error},
db, worker,
};
#[derive(Clone)]
pub struct Loaders {
pub user: loaders::user::UserLoader,
pub repository: loaders::repository::RepositoryLoader,
}
impl Default for Loaders {
fn default() -> Self {
Self {
user: Loader::new(loaders::user::UserBatcher)
.with_yield_count(loaders::user::YIELD_COUNT),
repository: Loader::new(loaders::repository::RepositoryBatcher)
.with_yield_count(loaders::repository::YIELD_COUNT),
}
}
}
pub struct Context {
pub db_pool: db::Pool,
pub worker_pool: worker::Pool,
pub loaders: Loaders,
pub logged_in: bool,
}
impl Context {
pub fn get_db_conn(&self) -> FieldResult<db::PoolConnection> {
self.db_pool
.get()
.map_or(Err(Error::Internal.into_field_error()), Ok)
}
pub async fn get_worker_conn(
&self,
) -> FieldResult<bb8::PooledConnection<worker::ConnectionManager>> {
self.worker_pool
.get()
.await
.map_or(Err(Error::Internal.into_field_error()), Ok)
}
pub fn parse_uuid(id: &[u8]) -> FieldResult<Uuid> {
Uuid::parse(id).map_err(|_| Error::InvalidUuid.into_field_error())
}
}
impl juniper::Context for Context {}

99
backend/src/api/error.rs Normal file
@@ -0,0 +1,99 @@
use juniper::{graphql_value, FieldError, FieldResult, IntoFieldError, ScalarValue};
pub enum Error {
Internal,
DoesNotExist,
InvalidUuid,
InvalidCredentials,
Unauthenticated,
RepoAlreadyExists,
ExternalRepoDoesNotExist,
RepoPullBranchDoesNotExist,
}
impl<S: ScalarValue> IntoFieldError<S> for Error {
fn into_field_error(self) -> FieldError<S> {
match self {
Self::Internal => FieldError::new(
"Internal server error",
graphql_value!({
"type": "INTERNAL"
}),
),
Self::DoesNotExist => FieldError::new(
"Record does not exist",
graphql_value!({
"type": "DOES_NOT_EXIST"
}),
),
Self::InvalidUuid => FieldError::new(
"Invalid UUID",
graphql_value!({
"type": "INVALID_UUID",
}),
),
Self::InvalidCredentials => FieldError::new(
"Invalid credentials",
graphql_value!({
"type": "INVALID_CREDENTIALS",
}),
),
Self::Unauthenticated => FieldError::new(
"Unauthenticated",
graphql_value!({
"type": "UNAUTHENTICATED",
}),
),
Self::RepoAlreadyExists => FieldError::new(
"Repository already exists",
graphql_value!({
"type": "REPO_ALREADY_EXISTS",
}),
),
Self::ExternalRepoDoesNotExist => FieldError::new(
"Repository does not exist on Git server",
graphql_value!({
"type": "EXTERNAL_REPO_DOES_NOT_EXIST",
}),
),
Self::RepoPullBranchDoesNotExist => FieldError::new(
"Repository does not have pages branch",
graphql_value!({
"type": "REPO_PULL_BRANCH_DOES_NOT_EXIST",
}),
),
}
}
}
pub trait QueryResultIntoFieldResult<T> {
fn into_field_result(self) -> FieldResult<T>;
}
impl<T> QueryResultIntoFieldResult<T> for diesel::QueryResult<T> {
fn into_field_result(self) -> FieldResult<T> {
// match self {
// Ok(x) => Ok(x),
// Err(_) => Err(Error::Internal.into_field_error()),
// }
self.map_err(|_| Error::Internal.into_field_error())
}
}
pub trait AsyncResultIntoFieldResult<T> {
fn into_field_result(self) -> FieldResult<T>;
}
impl AsyncResultIntoFieldResult<celery::task::AsyncResult>
for Result<celery::task::AsyncResult, celery::error::CeleryError>
{
fn into_field_result(self) -> FieldResult<celery::task::AsyncResult> {
// match self {
// Ok(x) => Ok(x),
// Err(_) => Err(Error::Internal.into_field_error()),
// }
self.map_err(|_| Error::Internal.into_field_error())
}
}

backend/src/api/loaders/mod.rs Normal file
@@ -0,0 +1,77 @@
use async_trait::async_trait;
use std::clone::Clone;
use std::fmt::Debug;
use std::hash::Hash;
use std::io::{Error, ErrorKind};
pub mod repository;
pub mod user;
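/// Like `try_load`, but maps the loader's `NotFound` error to `Ok(None)` so callers can treat missing rows as an expected case.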
#[async_trait]
pub trait TryOptionLoad<K, V>: Clone
where
K: Eq + Hash + Clone + Debug + Send + Sync,
V: Clone + Debug + Send,
{
async fn try_option_load(&self, key: K) -> Result<Option<V>, Error>;
}
#[async_trait]
impl<K, V, F> TryOptionLoad<K, V> for dataloader::non_cached::Loader<K, V, F>
where
K: Eq + Hash + Clone + Debug + Send + Sync,
V: Clone + Debug + Send,
F: dataloader::BatchFn<K, V> + Send + Sync,
{
async fn try_option_load(&self, key: K) -> Result<Option<V>, Error> {
async fn internal_try_option_load<K, V, F>(
loader: &dataloader::non_cached::Loader<K, V, F>,
key: K,
) -> Result<Option<V>, Error>
where
K: Eq + Hash + Clone + Debug + Send + Sync,
V: Clone + Debug + Send,
F: dataloader::BatchFn<K, V> + Send + Sync,
{
match loader.try_load(key).await {
Ok(x) => Ok(Some(x)),
Err(e) => match e.kind() {
ErrorKind::NotFound => Ok(None),
_ => Err(e),
},
}
}
internal_try_option_load(self, key).await
}
}
#[async_trait]
impl<K, V, F> TryOptionLoad<K, V> for dataloader::cached::Loader<K, V, F>
where
K: Eq + Hash + Clone + Debug + Send + Sync,
V: Clone + Debug + Send,
F: dataloader::BatchFn<K, V> + Send + Sync,
{
async fn try_option_load(&self, key: K) -> Result<Option<V>, Error> {
async fn internal_try_option_load<K, V, F>(
loader: &dataloader::cached::Loader<K, V, F>,
key: K,
) -> Result<Option<V>, Error>
where
K: Eq + Hash + Clone + Debug + Send + Sync,
V: Clone + Debug + Send,
F: dataloader::BatchFn<K, V> + Send + Sync,
{
match loader.try_load(key).await {
Ok(x) => Ok(Some(x)),
Err(e) => match e.kind() {
ErrorKind::NotFound => Ok(None),
_ => Err(e),
},
}
}
internal_try_option_load(self, key).await
}
}

backend/src/api/loaders/repository.rs Normal file
@@ -0,0 +1,42 @@
use async_trait::async_trait;
use dataloader::non_cached::Loader;
use dataloader::BatchFn;
use diesel::prelude::*;
use std::collections::HashMap;
use uuidv7::Uuid;
use crate::{api::models, db};
pub struct RepositoryBatcher;
#[async_trait]
impl BatchFn<Uuid, models::repository::Repository> for RepositoryBatcher {
async fn load(&mut self, keys: &[Uuid]) -> HashMap<Uuid, models::repository::Repository> {
let db_conn = &mut db::POOL.get().unwrap();
let mut map = HashMap::new();
for row in db::schema::repositories::table
.select((
db::schema::repositories::id,
db::schema::repositories::user_id,
db::schema::repositories::name,
))
.filter(db::schema::repositories::id.eq_any(keys))
.load::<(Uuid, Uuid, String)>(db_conn)
.unwrap()
{
let row: (Uuid, Uuid, String) = row;
let data = models::repository::Repository {
id: row.0,
user_id: row.1,
name: row.2,
};
map.insert(data.id, data);
}
map
}
}
pub type RepositoryLoader = Loader<Uuid, models::repository::Repository, RepositoryBatcher>;
pub const YIELD_COUNT: usize = 100;

backend/src/api/loaders/user.rs Normal file
@@ -0,0 +1,37 @@
use async_trait::async_trait;
use dataloader::non_cached::Loader;
use dataloader::BatchFn;
use diesel::prelude::*;
use std::collections::HashMap;
use uuidv7::Uuid;
use crate::{api::models, db};
pub struct UserBatcher;
#[async_trait]
impl BatchFn<Uuid, models::user::User> for UserBatcher {
async fn load(&mut self, keys: &[Uuid]) -> HashMap<Uuid, models::user::User> {
let db_conn = &mut db::POOL.get().unwrap();
let mut map = HashMap::new();
for row in db::schema::users::table
.select((db::schema::users::id, db::schema::users::name))
.filter(db::schema::users::id.eq_any(keys))
.load::<(Uuid, String)>(db_conn)
.unwrap()
{
let row: (Uuid, String) = row;
let data = models::user::User {
id: row.0,
name: row.1,
};
map.insert(data.id, data);
}
map
}
}
pub type UserLoader = Loader<Uuid, models::user::User, UserBatcher>;
pub const YIELD_COUNT: usize = 100;

227
backend/src/api/mod.rs Normal file
@@ -0,0 +1,227 @@
use diesel::prelude::*;
use gritea::client::Gritea;
use juniper::{graphql_object, EmptySubscription, FieldResult, IntoFieldError, RootNode};
use uuidv7::Uuid;
use crate::{db, gritea_ext::GriteaExt, worker, CONFIG};
pub mod context;
pub mod error;
pub mod loaders;
pub mod models;
pub mod scalars;
pub use context::Context;
pub use error::{AsyncResultIntoFieldResult, Error, QueryResultIntoFieldResult};
use loaders::TryOptionLoad;
pub struct Query;
#[graphql_object(context = Context)]
impl Query {
fn ping() -> &'static str {
"pong"
}
fn verify_login(username: String, password: String) -> bool {
username == *CONFIG.user && password == *CONFIG.password
}
async fn user(context: &Context, id: scalars::Uuid) -> FieldResult<models::user::User> {
match context.loaders.user.try_option_load(*id).await {
Ok(Some(user)) => Ok(user),
Ok(None) => Err(Error::DoesNotExist.into_field_error()),
Err(_) => Err(Error::Internal.into_field_error()),
}
}
async fn user_by_name(context: &Context, name: String) -> FieldResult<models::user::User> {
let db_conn = &mut context.get_db_conn()?;
let id = match db::schema::users::table
.select(db::schema::users::id)
.filter(db::schema::users::name.eq(name))
.first::<Uuid>(db_conn)
.optional()
.into_field_result()?
{
Some(x) => x,
None => return Err(Error::DoesNotExist.into_field_error()),
};
context
.loaders
.user
.try_load(id)
.await
.map_err(|_| Error::Internal.into_field_error())
}
async fn users(context: &Context) -> FieldResult<Vec<models::user::User>> {
let db_conn = &mut context.get_db_conn()?;
let ids = db::schema::users::table
.select(db::schema::users::id)
.load::<Uuid>(db_conn)
.into_field_result()?;
context.loaders.user.try_load_many(ids).await.map_or_else(
|_| Err(Error::Internal.into_field_error()),
|x| Ok(x.into_values().collect()),
)
}
async fn repository(
context: &Context,
id: scalars::Uuid,
) -> FieldResult<models::repository::Repository> {
match context.loaders.repository.try_option_load(*id).await {
Ok(Some(user)) => Ok(user),
Ok(None) => Err(Error::DoesNotExist.into_field_error()),
Err(_) => Err(Error::Internal.into_field_error()),
}
}
async fn repositories(context: &Context) -> FieldResult<Vec<models::repository::Repository>> {
let db_conn = &mut context.get_db_conn()?;
let ids = db::schema::repositories::table
.select(db::schema::repositories::id)
.load::<Uuid>(db_conn)
.into_field_result()?;
context
.loaders
.repository
.try_load_many(ids)
.await
.map_or_else(
|_| Err(Error::Internal.into_field_error()),
|x| Ok(x.into_values().collect()),
)
}
}
pub struct Mutation;
#[graphql_object(context = Context)]
impl Mutation {
async fn create_repository(
context: &Context,
input: models::repository::CreateRepositoryInput,
) -> FieldResult<models::repository::Repository> {
if !context.logged_in {
return Err(Error::Unauthenticated.into_field_error());
}
let db_conn = &mut context.get_db_conn()?;
let user_id = db::schema::users::table
.select(db::schema::users::id)
.filter(db::schema::users::name.eq(&input.user))
.first::<Uuid>(db_conn)
.optional()
.into_field_result()?;
if let Some(id) = user_id {
if diesel::select(diesel::dsl::exists(
db::schema::repositories::table
.filter(db::schema::repositories::user_id.eq(id))
.filter(db::schema::repositories::name.eq(&input.name)),
))
.get_result::<bool>(db_conn)
.into_field_result()?
{
return Err(Error::RepoAlreadyExists.into_field_error());
}
}
let escaped_user = urlencoding::encode(&input.user);
let escaped_name = urlencoding::encode(&input.name);
match Gritea::builder(&CONFIG.gitea_url)
.token(&*CONFIG.gitea_api_token)
.build()
{
Ok(client) => {
let repo = match client.get_repo(&escaped_user, &escaped_name).await {
Ok(x) => x,
Err(_) => return Err(Error::ExternalRepoDoesNotExist.into_field_error()),
};
if repo.private {
return Err(Error::ExternalRepoDoesNotExist.into_field_error());
}
let branches = match client.get_repo_branches(&escaped_user, &escaped_name).await {
Ok(x) => x,
Err(_) => return Err(Error::Internal.into_field_error()),
};
if !branches
.into_iter()
.any(|x| x.name == CONFIG.gitea_pull_branch)
{
return Err(Error::RepoPullBranchDoesNotExist.into_field_error());
}
}
Err(_) => return Err(Error::Internal.into_field_error()),
}
let user_id = match user_id {
Some(x) => x,
None => diesel::insert_into(db::schema::users::table)
.values(db::models::NewUser { name: &input.user })
.returning(db::schema::users::id)
.get_result::<Uuid>(db_conn)
.into_field_result()?,
};
let id = diesel::insert_into(db::schema::repositories::table)
.values(db::models::NewRepository {
user_id,
name: &input.name,
})
.returning(db::schema::repositories::id)
.get_result::<Uuid>(db_conn)
.into_field_result()?;
let worker_conn = context.get_worker_conn().await?;
worker_conn
.send_task(worker::get_repo::get_repo::new(id))
.await
.into_field_result()?;
context
.loaders
.repository
.try_load(id)
.await
.map_err(|_| Error::Internal.into_field_error())
}
async fn delete_repository(context: &Context, id: scalars::Uuid) -> FieldResult<bool> {
if !context.logged_in {
return Err(Error::Unauthenticated.into_field_error());
}
let db_conn = &mut context.get_db_conn()?;
if diesel::select(diesel::dsl::not(diesel::dsl::exists(
db::schema::repositories::table.filter(db::schema::repositories::id.eq(*id)),
)))
.get_result::<bool>(db_conn)
.into_field_result()?
{
return Err(Error::DoesNotExist.into_field_error());
}
let worker_conn = context.get_worker_conn().await?;
worker_conn
.send_task(worker::delete_repo::delete_repo::new(*id))
.await
.into_field_result()?;
Ok(true)
}
}
pub type Schema = RootNode<'static, Query, Mutation, EmptySubscription<Context>>;
pub fn schema() -> Schema {
Schema::new(Query, Mutation, EmptySubscription::new())
}
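For reference, once the server is up, a page can be registered through the GraphQL endpoint with HTTP Basic auth (a sketch: juniper exposes the snake_case fields as camelCase, and the host, port, owner, and repository name below are placeholders):

curl -u "$PAGES_USER:$PAGES_PASSWORD" \
  -H 'Content-Type: application/json' \
  -d '{"query": "mutation { createRepository(input: { user: \"dergrimm\", name: \"blog\" }) { id name url } }"}' \
  http://localhost:8080/graphql

On success the repository is stored and its first pull is queued via the get_repo Celery task.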

backend/src/api/models/mod.rs Normal file
@@ -0,0 +1,2 @@
pub mod repository;
pub mod user;

backend/src/api/models/repository.rs Normal file
@@ -0,0 +1,62 @@
use juniper::{graphql_object, FieldResult, GraphQLInputObject, IntoFieldError};
use uuidv7::Uuid;
use crate::{
api::{models, scalars, Context, Error},
CONFIG,
};
#[derive(Clone, Debug)]
pub struct Repository {
pub id: Uuid,
pub user_id: Uuid,
pub name: String,
}
#[graphql_object(context = Context)]
impl Repository {
fn id(&self) -> scalars::Uuid {
scalars::Uuid(self.id)
}
async fn user(&self, context: &Context) -> FieldResult<models::user::User> {
context
.loaders
.user
.try_load(self.user_id)
.await
.map_err(|_| Error::Internal.into_field_error())
}
fn name(&self) -> &str {
&self.name
}
async fn url(&self, context: &Context, scheme: Option<bool>) -> FieldResult<String> {
let user_name = context
.loaders
.user
.try_load(self.user_id)
.await
.map_err(|_| Error::Internal.into_field_error())?
.name;
Ok(format!(
"{}{}.{}/{}",
if scheme.unwrap_or(true) {
"https://"
} else {
""
},
user_name,
CONFIG.domain,
self.name
))
}
}
#[derive(GraphQLInputObject)]
pub struct CreateRepositoryInput {
pub user: String,
pub name: String,
}

backend/src/api/models/user.rs Normal file
@@ -0,0 +1,47 @@
use diesel::prelude::*;
use juniper::{graphql_object, FieldResult, IntoFieldError};
use uuidv7::Uuid;
use crate::{
api::{models, scalars, Context, Error, QueryResultIntoFieldResult},
db,
};
#[derive(Clone, Debug)]
pub struct User {
pub id: Uuid,
pub name: String,
}
#[graphql_object(context = Context)]
impl User {
fn id(&self) -> scalars::Uuid {
scalars::Uuid(self.id)
}
fn name(&self) -> &str {
&self.name
}
async fn repositories(
&self,
context: &Context,
) -> FieldResult<Vec<models::repository::Repository>> {
let db_conn = &mut context.get_db_conn()?;
let ids = db::schema::repositories::table
.select(db::schema::repositories::id)
.filter(db::schema::repositories::user_id.eq(self.id))
.load::<Uuid>(db_conn)
.into_field_result()?;
context
.loaders
.repository
.try_load_many(ids)
.await
.map_or_else(
|_| Err(Error::Internal.into_field_error()),
|x| Ok(x.into_values().collect()),
)
}
}

backend/src/api/scalars/mod.rs Normal file
@@ -0,0 +1,3 @@
pub mod uuid;
pub use uuid::Uuid;

backend/src/api/scalars/uuid.rs Normal file
@@ -0,0 +1,39 @@
use std::ops::Deref;
type Value = uuidv7::Uuid;
pub struct Uuid(pub Value);
#[juniper::graphql_scalar(name = "UUID", description = "UUID encoded as a string")]
impl<S> GraphQLScalar for Uuid
where
S: juniper::ScalarValue,
{
fn resolve(&self) -> juniper::Value {
juniper::Value::scalar(self.0.to_string())
}
fn from_input_value(value: &juniper::InputValue) -> Option<Uuid> {
value
.as_string_value()
.and_then(|s| {
use uuid_simd::UuidExt;
use uuidv7::Uuid;
Uuid::parse(s.as_bytes()).ok()
})
.map(Uuid)
}
fn from_str<'a>(value: juniper::ScalarToken<'a>) -> juniper::ParseScalarResult<'a, S> {
<String as juniper::ParseScalarValue<S>>::from_str(value)
}
}
impl Deref for Uuid {
type Target = Value;
fn deref(&self) -> &Self::Target {
&self.0
}
}

69
backend/src/config.rs Normal file
@@ -0,0 +1,69 @@
use anyhow::{bail, Result};
use debug_ignore::DebugIgnore;
use envconfig::Envconfig;
use lazy_static::lazy_static;
use url::Url;
#[derive(Envconfig, Debug)]
pub struct Config {
#[envconfig(from = "PAGES_DB_URL")]
pub db_url: DebugIgnore<String>,
#[envconfig(from = "PAGES_AMQP_URL")]
pub amqp_url: DebugIgnore<String>,
#[envconfig(from = "PAGES_USER")]
pub user: String,
#[envconfig(from = "PAGES_PASSWORD")]
pub password: DebugIgnore<String>,
#[envconfig(from = "PAGES_GITEA_URL")]
pub gitea_url: String,
#[envconfig(from = "PAGES_GITEA_API_TOKEN")]
pub gitea_api_token: DebugIgnore<String>,
#[envconfig(from = "PAGES_GITEA_SECRET")]
pub gitea_secret: DebugIgnore<String>,
#[envconfig(from = "PAGES_GITEA_PULL_URL")]
pub gitea_pull_url: Url,
#[envconfig(from = "PAGES_GITEA_PULL_BRANCH")]
pub gitea_pull_branch: String,
#[envconfig(from = "PAGES_NGINX_CONFIG_DIR")]
pub nginx_config_dir: String,
#[envconfig(from = "PAGES_REPOS_DIR")]
pub repos_dir: String,
#[envconfig(from = "PAGES_DOMAIN")]
pub domain: String,
}
pub const PASSWORD_MIN_LEN: usize = 64;
impl Config {
pub fn validate(&self) -> Result<()> {
if self.password.len() < PASSWORD_MIN_LEN {
bail!(
"Password is too short: {} < {}",
self.password.len(),
PASSWORD_MIN_LEN
);
}
Ok(())
}
}
#[derive(Debug)]
pub struct DynConfig {
pub cloudflare_zone_name: String,
}
lazy_static! {
pub static ref CONFIG: Config = Config::init_from_env().unwrap();
}
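For orientation, a minimal environment sketch covering every variable the struct above reads; all values are placeholders, not suggested defaults:

PAGES_DB_URL=postgres://pages:secret@db/pages
PAGES_AMQP_URL=amqp://rabbitmq:5672
PAGES_USER=admin
PAGES_PASSWORD=<random string, at least 64 characters>
PAGES_GITEA_URL=https://git.example.com
PAGES_GITEA_API_TOKEN=<API token>
PAGES_GITEA_SECRET=<webhook secret>
PAGES_GITEA_PULL_URL=https://git.example.com/
PAGES_GITEA_PULL_BRANCH=pages
PAGES_NGINX_CONFIG_DIR=/etc/nginx/conf.d
PAGES_REPOS_DIR=/var/www/repos
PAGES_DOMAIN=pages.example.com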

29
backend/src/db/mod.rs Normal file
@@ -0,0 +1,29 @@
use anyhow::Result;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, PooledConnection};
use lazy_static::lazy_static;
use crate::CONFIG;
pub mod models;
pub mod schema;
pub type Pool = diesel::r2d2::Pool<ConnectionManager<PgConnection>>;
pub type Connection = PgConnection;
pub type PoolConnection = PooledConnection<ConnectionManager<PgConnection>>;
pub fn establish_connection() -> ConnectionResult<PgConnection> {
use diesel::Connection;
PgConnection::establish(&CONFIG.db_url)
}
pub fn pool() -> Result<Pool> {
Ok(diesel::r2d2::Pool::builder()
.build(ConnectionManager::<PgConnection>::new(&*CONFIG.db_url))?)
}
lazy_static! {
pub static ref POOL: Pool = pool().unwrap();
}

37
backend/src/db/models.rs Normal file
@@ -0,0 +1,37 @@
use chrono::prelude::*;
use diesel::prelude::*;
use uuidv7::Uuid;
use crate::db::schema;
#[derive(Identifiable, Queryable, Debug)]
#[diesel(table_name = schema::users)]
pub struct User {
pub id: Uuid,
pub name: String,
pub created_at: DateTime<Utc>,
pub updated_at: Option<DateTime<Utc>>,
}
#[derive(Insertable, Debug)]
#[diesel(table_name = schema::users)]
pub struct NewUser<'a> {
pub name: &'a str,
}
#[derive(Identifiable, Queryable, Debug)]
#[diesel(table_name = schema::repositories)]
pub struct Repository {
pub id: Uuid,
pub user_id: Uuid,
pub name: String,
pub created_at: DateTime<Utc>,
pub updated_at: Option<DateTime<Utc>>,
}
#[derive(Insertable, Debug)]
#[diesel(table_name = schema::repositories)]
pub struct NewRepository<'a> {
pub user_id: Uuid,
pub name: &'a str,
}

18
backend/src/db/schema.rs Normal file
@@ -0,0 +1,18 @@
diesel::table! {
users {
id -> Uuid,
name -> Text,
created_at -> Timestamptz,
updated_at -> Nullable<Timestamptz>,
}
}
diesel::table! {
repositories {
id -> Uuid,
user_id -> Uuid,
name -> Text,
created_at -> Timestamptz,
updated_at -> Nullable<Timestamptz>,
}
}

68
backend/src/gritea_ext.rs Normal file
@@ -0,0 +1,68 @@
use async_trait::async_trait;
use chrono::prelude::*;
use gritea::{
client::{resp_json, Gritea},
Result,
};
use http::Method;
use serde::Deserialize;
#[derive(Deserialize, Debug)]
pub struct PayloadUser {
pub email: String,
pub name: String,
pub username: String,
}
#[derive(Deserialize, Debug)]
pub struct PayloadCommitVerification {
pub payload: String,
pub reason: String,
pub signature: String,
pub signer: Option<PayloadUser>,
pub verified: bool,
}
#[derive(Deserialize, Debug)]
pub struct PayloadCommit {
pub added: Option<Vec<String>>,
pub author: PayloadUser,
pub committer: PayloadUser,
pub id: String,
pub message: String,
pub modified: Option<Vec<String>>,
pub removed: Option<Vec<String>>,
pub timestamp: DateTime<Utc>,
pub url: String,
pub verification: Option<PayloadCommitVerification>,
}
#[derive(Deserialize, Debug)]
pub struct Branch {
pub commit: PayloadCommit,
pub effective_branch_protection_name: String,
pub enable_status_check: bool,
pub name: String,
pub protected: bool,
pub required_approvals: i64,
pub status_check_contexts: Vec<String>,
pub user_can_merge: bool,
pub user_can_push: bool,
}
#[async_trait]
pub trait GriteaExt {
async fn get_repo_branches(&self, owner: &str, repo: &str) -> Result<Vec<Branch>>;
}
#[async_trait]
impl GriteaExt for Gritea {
async fn get_repo_branches(&self, owner: &str, repo: &str) -> Result<Vec<Branch>> {
let resp = self
.request(Method::GET, &format!("repos/{}/{}/branches", owner, repo))?
.send()
.await?;
resp_json(resp, "get repo branches failed").await
}
}

35
backend/src/lib.rs Normal file
@@ -0,0 +1,35 @@
use anyhow::Result;
use askama::Template;
use std::fs;
pub mod api;
pub mod config;
pub mod db;
pub mod gritea_ext;
pub mod templates;
pub mod worker;
pub use config::CONFIG;
pub const ASCII_LOGO: &str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/logo.txt"));
pub fn init() -> Result<()> {
println!("{}\n", ASCII_LOGO);
CONFIG.validate()?;
Ok(())
}
pub fn init_nginx() -> Result<()> {
let config = templates::NginxConfig {
domain_segments: CONFIG.domain.split('.').collect(),
}
.render()?;
log::info!("Updating Nginx config");
fs::write(
format!("{}/gitea_pages.conf", CONFIG.nginx_config_dir),
config,
)?;
Ok(())
}

221
backend/src/main.rs Normal file
@@ -0,0 +1,221 @@
#[cfg(not(target_env = "msvc"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_env = "msvc"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
use anyhow::Result;
use clap::{Parser, Subcommand};
use diesel::prelude::*;
use uuidv7::Uuid;
use gitea_pages::{api, db, init, init_nginx, worker, CONFIG};
#[derive(Debug, Parser)]
#[clap(author, version, about, long_about = None)]
struct Cli {
#[clap(subcommand)]
commands: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
#[clap(about = "Starts Celery worker")]
Worker,
#[clap(about = "Starts beat for Celery worker")]
Beat,
#[clap(about = "Starts API")]
Api,
}
#[tokio::main]
async fn main() -> Result<()> {
let args = Cli::parse();
env_logger::init();
init()?;
match args.commands {
Commands::Worker => {
init_nginx()?;
let worker_conn = &worker::POOL.get().await.get().await?;
worker_conn.display_pretty().await;
worker_conn.consume_from(&[worker::QUEUE_NAME]).await?;
}
Commands::Beat => {
worker::beat().await?.start().await?;
}
Commands::Api => {
use actix_cors::Cors;
use actix_web::{
http::header, middleware, web, App, HttpRequest, HttpResponse, HttpServer,
};
use anyhow::Result;
use juniper_actix::graphql_handler;
use serde::Deserialize;
use std::str::FromStr;
async fn not_found() -> &'static str {
"Not found!"
}
#[derive(Deserialize, Debug)]
struct Owner {
username: String,
}
#[derive(Deserialize, Debug)]
struct Repository {
owner: Owner,
name: String,
}
#[derive(Deserialize, Debug)]
struct Payload {
repository: Repository,
}
async fn webhook(req: HttpRequest, body: web::Bytes) -> HttpResponse {
if let Some(content_type) = req.headers().get(header::CONTENT_TYPE) {
if content_type.as_bytes() != b"application/json" {
return HttpResponse::BadRequest()
.body("Content-Type not application/json");
}
}
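// Gitea signs the raw request body with HMAC-SHA256 using the shared webhook secret and sends the hex digest in X-Gitea-Signature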
let key =
ring::hmac::Key::new(ring::hmac::HMAC_SHA256, CONFIG.gitea_secret.as_bytes());
let signature = match req.headers().get("X-Gitea-Signature") {
Some(x) => x,
None => return HttpResponse::BadRequest().body("X-Gitea-Signature not given"),
};
let mut tag = vec![0u8; signature.len() / 2];
if hex_simd::decode(signature.as_bytes(), hex_simd::Out::from_slice(&mut tag))
.is_err()
{
return HttpResponse::BadRequest().body("Could not decode signature");
}
if ring::hmac::verify(&key, body.as_ref(), &tag).is_err() {
return HttpResponse::BadRequest().body("Invalid signature");
}
let payload = match serde_json::from_slice::<Payload>(body.as_ref()) {
Ok(x) => x,
Err(_) => return HttpResponse::BadRequest().body("Payload has invalid JSON"),
};
let db_conn = &mut match db::POOL.get() {
Ok(x) => x,
Err(_) => return HttpResponse::InternalServerError().finish(),
};
let user_id = match db::schema::users::table
.select(db::schema::users::id)
.filter(db::schema::users::name.eq(payload.repository.owner.username))
.first::<Uuid>(db_conn)
.optional()
{
Ok(x) => match x {
Some(y) => y,
None => {
return HttpResponse::BadRequest().body("Repository is not allowed")
}
},
Err(_) => return HttpResponse::InternalServerError().finish(),
};
let repo_id = match db::schema::repositories::table
.select(db::schema::repositories::id)
.filter(db::schema::repositories::user_id.eq(user_id))
.filter(db::schema::repositories::name.eq(payload.repository.name))
.first::<Uuid>(db_conn)
.optional()
{
Ok(x) => match x {
Some(y) => y,
None => {
return HttpResponse::BadRequest().body("Repository is not allowed")
}
},
Err(_) => return HttpResponse::InternalServerError().finish(),
};
let worker_conn = match worker::POOL.get().await.get().await {
Ok(x) => x,
Err(_) => return HttpResponse::InternalServerError().finish(),
};
if worker_conn
.send_task(worker::get_repo::get_repo::new(repo_id))
.await
.is_err()
{
return HttpResponse::InternalServerError().finish();
}
HttpResponse::Ok().finish()
}
async fn graphql_route(
req: HttpRequest,
payload: web::Payload,
schema: web::Data<api::Schema>,
) -> Result<HttpResponse, actix_web::Error> {
let logged_in = match req
.headers()
.get(header::AUTHORIZATION)
.and_then(|x| x.to_str().ok())
{
Some(x) => match http_auth_basic::Credentials::from_str(x) {
Ok(cred) => {
cred.user_id == CONFIG.user && cred.password == *CONFIG.password
}
Err(_) => false,
},
None => false,
};
let context = api::Context {
db_pool: db::POOL.clone(),
worker_pool: worker::POOL.get().await.clone(),
loaders: api::context::Loaders::default(),
logged_in,
};
graphql_handler(&schema, &context, req, payload).await
}
HttpServer::new(move || {
App::new()
.app_data(web::Data::new(api::schema()))
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
.wrap(
Cors::default()
.allow_any_origin()
.allowed_methods(["POST", "GET"])
.allowed_headers([header::AUTHORIZATION, header::ACCEPT])
.allowed_header(header::CONTENT_TYPE)
.supports_credentials()
.max_age(3600),
)
.service(web::resource("/webhook").route(web::post().to(webhook)))
.service(
web::resource("/graphql")
.route(web::post().to(graphql_route))
.route(web::get().to(graphql_route)),
)
.default_service(web::to(not_found))
})
.bind(("0.0.0.0", 8080))?
.run()
.await?;
}
}
Ok(())
}

7
backend/src/templates.rs Normal file
@@ -0,0 +1,7 @@
use askama::Template;
#[derive(Template)]
#[template(path = "gitea_pages.conf", escape = "none")]
pub struct NginxConfig<'a> {
pub domain_segments: Vec<&'a str>,
}

backend/src/worker/delete_repo.rs Normal file
@@ -0,0 +1,53 @@
use anyhow::Result;
use celery::{error::TaskError, task::TaskResult};
use diesel::prelude::*;
use std::fs;
use uuidv7::Uuid;
use crate::{db, CONFIG};
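// Remove the repository row plus its on-disk checkout; if the owner has no repositories left, drop the user row and their whole directory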
fn do_task(db_conn: &mut db::Connection, id: Uuid) -> Result<()> {
let (user_id, name) = db::schema::repositories::table
.select((
db::schema::repositories::user_id,
db::schema::repositories::name,
))
.filter(db::schema::repositories::id.eq(id))
.first::<(Uuid, String)>(db_conn)?;
let user_name = db::schema::users::table
.select(db::schema::users::name)
.filter(db::schema::users::id.eq(user_id))
.first::<String>(db_conn)?;
diesel::delete(db::schema::repositories::table.filter(db::schema::repositories::id.eq(id)))
.execute(db_conn)?;
if db::schema::repositories::table
.filter(db::schema::repositories::user_id.eq(user_id))
.count()
.get_result::<i64>(db_conn)?
== 0
{
diesel::delete(db::schema::users::table.filter(db::schema::users::id.eq(user_id)))
.execute(db_conn)?;
fs::remove_dir_all(format!("{}/{}", CONFIG.repos_dir, user_name))?;
} else {
fs::remove_dir_all(format!("{}/{}/{}", CONFIG.repos_dir, user_name, name))?;
}
Ok(())
}
#[celery::task]
pub fn delete_repo(id: Uuid) -> TaskResult<()> {
let db_conn = &mut match db::POOL.get() {
Ok(x) => x,
Err(e) => return Err(TaskError::UnexpectedError(format!("{:?}", e))),
};
if let Err(e) = do_task(db_conn, id) {
return Err(TaskError::UnexpectedError(format!("{:?}", e)));
}
Ok(())
}

backend/src/worker/get_repo.rs Normal file
@@ -0,0 +1,100 @@
use anyhow::{bail, Context, Result};
use celery::{error::TaskError, task::TaskResult};
use diesel::prelude::*;
use std::{fs, path::Path};
use url::Url;
use uuidv7::Uuid;
use crate::{db, CONFIG};
fn get_repo_name(db_conn: &mut db::Connection, id: Uuid) -> Result<(String, String)> {
let (user_id, name) = db::schema::repositories::table
.select((
db::schema::repositories::user_id,
db::schema::repositories::name,
))
.filter(db::schema::repositories::id.eq(id))
.first::<(Uuid, String)>(db_conn)?;
let user_name = db::schema::users::table
.select(db::schema::users::name)
.filter(db::schema::users::id.eq(user_id))
.first::<String>(db_conn)?;
Ok((user_name, name))
}
fn repo_dir(user: &str, repo: &str) -> (String, String, String) {
let parent = format!("{}/{}", CONFIG.repos_dir, user);
let dir = format!("{}/{}", parent, repo);
(parent, dir, format!("{}/{}.git", user, repo))
}
fn do_task(parent_dir: &str, repo_dir: &str, full_name_path: &str) -> Result<()> {
let path = Path::new(repo_dir);
if path.exists() && path.is_dir() {
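// Existing checkout: fetch the configured pull branch and fast-forward; non-fast-forward updates are rejected below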
let repo = git2::Repository::open(repo_dir)?;
repo.find_remote("origin")?
.fetch(&[&CONFIG.gitea_pull_branch], None, None)?;
let fetch_head = repo.find_reference("FETCH_HEAD")?;
let fetch_commit = repo.reference_to_annotated_commit(&fetch_head)?;
let analysis = repo.merge_analysis(&[&fetch_commit])?;
if !analysis.0.is_up_to_date() {
if analysis.0.is_fast_forward() {
let refname = format!("refs/heads/{}", CONFIG.gitea_pull_branch);
let mut reference = repo.find_reference(&refname)?;
reference.set_target(fetch_commit.id(), "Fast-Forward")?;
repo.set_head(&refname)?;
repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;
} else {
bail!("Fast-forward only!");
}
}
} else {
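// First run for this repo: clone it from the pull URL and check out the pages branch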
fs::create_dir_all(parent_dir)?;
let repo = git2::Repository::clone(
Url::parse(CONFIG.gitea_pull_url.as_str())?
.join(full_name_path)?
.as_str(),
repo_dir,
)?;
let (object, reference) =
repo.revparse_ext(&format!("remotes/origin/{}", CONFIG.gitea_pull_branch))?;
repo.checkout_tree(&object, None)?;
match reference {
Some(gref) => repo.set_head(gref.name().context("Could not get ref name")?),
None => repo.set_head_detached(object.id()),
}
.context("Failed to set HEAD")?;
};
Ok(())
}
#[celery::task]
pub async fn get_repo(id: Uuid) -> TaskResult<()> {
let db_conn = &mut match db::POOL.get() {
Ok(x) => x,
Err(e) => return Err(TaskError::UnexpectedError(format!("{:?}", e))),
};
let (user_name, repo_name) = match get_repo_name(db_conn, id) {
Ok(x) => x,
Err(e) => return Err(TaskError::UnexpectedError(format!("{:?}", e))),
};
let (parent_dir, repo_dir, full_name_path) = repo_dir(&user_name, &repo_name);
if let Err(e) = do_task(&parent_dir, &repo_dir, &full_name_path) {
if let Err(err) = fs::remove_dir_all(repo_dir) {
return Err(TaskError::UnexpectedError(format!("{:?}", err)));
}
return Err(TaskError::UnexpectedError(format!("{:?}", e)));
}
Ok(())
}

89
backend/src/worker/mod.rs Normal file
@@ -0,0 +1,89 @@
use anyhow::Result;
use async_once::AsyncOnce;
use async_trait::async_trait;
use celery::beat::{Beat, DeltaSchedule, LocalSchedulerBackend};
use celery::prelude::*;
use celery::Celery;
use lazy_static::lazy_static;
use std::sync::Arc;
use std::time::Duration;
use stdext::duration::DurationExt;
pub mod delete_repo;
pub mod get_repo;
pub mod update_repos;
use crate::CONFIG;
pub const QUEUE_NAME: &str = "gitea_pages";
pub async fn app() -> Result<Arc<Celery>, CeleryError> {
celery::app!(
broker = AMQPBroker { &CONFIG.amqp_url },
tasks = [
get_repo::get_repo,
delete_repo::delete_repo,
update_repos::update_repos,
],
task_routes = [
"*" => QUEUE_NAME,
],
prefetch_count = 2,
heartbeat = Some(10)
)
.await
}
pub async fn beat() -> Result<Beat<LocalSchedulerBackend>, BeatError> {
celery::beat!(
broker = AMQPBroker { &CONFIG.amqp_url },
tasks = [
// "cleanup_tokens" => {
// cleanup_tokens::cleanup_tokens,
// schedule = DeltaSchedule::new(Duration::from_hours(1)),
// args = (),
// }
"update_repos" => {
update_repos::update_repos,
schedule = DeltaSchedule::new(Duration::from_days(1)),
args = (),
},
],
task_routes = [
"*" => QUEUE_NAME,
]
)
.await
}
pub type Connection = Arc<Celery>;
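// bb8 "connections" are just shared handles to the Celery app, so the validity checks below are no-ops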
pub struct ConnectionManager;
#[async_trait]
impl bb8::ManageConnection for ConnectionManager {
type Connection = Connection;
type Error = CeleryError;
async fn connect(&self) -> Result<Self::Connection, Self::Error> {
app().await
}
async fn is_valid(&self, _conn: &mut Self::Connection) -> Result<(), Self::Error> {
Ok(())
}
fn has_broken(&self, _: &mut Self::Connection) -> bool {
false
}
}
pub type Pool = bb8::Pool<ConnectionManager>;
pub async fn pool() -> Result<Pool> {
Ok(bb8::Pool::builder().build(ConnectionManager).await?)
}
lazy_static! {
pub static ref POOL: AsyncOnce<Pool> = AsyncOnce::new(async { pool().await.unwrap() });
}

backend/src/worker/update_repos.rs Normal file
@@ -0,0 +1,31 @@
use anyhow::Result;
use celery::prelude::*;
use diesel::prelude::*;
use uuidv7::Uuid;
use crate::{db, worker};
async fn do_task() -> Result<()> {
let db_conn = &mut db::POOL.get()?;
let repo_ids = db::schema::repositories::table
.select(db::schema::repositories::id)
.load::<Uuid>(db_conn)?;
let worker_conn = worker::POOL.get().await.get().await?;
for id in repo_ids {
worker_conn
.send_task(worker::get_repo::get_repo::new(id))
.await?;
}
Ok(())
}
#[celery::task]
pub async fn update_repos() -> TaskResult<()> {
if let Err(e) = do_task().await {
return Err(TaskError::UnexpectedError(format!("{:?}", e)));
}
Ok(())
}

backend/templates/gitea_pages.conf Normal file
@@ -0,0 +1,28 @@
map $host $subdomain {
~^(?P<sub>.+)\.{{ domain_segments|join("\\.") }}$ $sub;
}
server {
listen 80;
server_name *.{{ domain_segments|join(".") }};
root /var/www/repos/$subdomain;
location = / {
autoindex on;
}
location / {
try_files $uri $uri/ /index.html;
index index.html index.htm;
if (!-e $request_filename) {
return 404;
}
}
location ~ /\.git {
deny all;
return 404;
}
}
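For illustration, with the placeholder domain pages.example.com the two templated lines above render as:

~^(?P<sub>.+)\.pages\.example\.com$ $sub;
server_name *.pages.example.com;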