
Commit

refactor: split main
KisaragiEffective committed Mar 13, 2024
1 parent f4adc1e commit 908d255
Showing 5 changed files with 181 additions and 159 deletions.
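
For orientation, here is a sketch of the module layout after the split (paths as they appear in the per-file diffs below):

packages/toy-blog/src/
├── main.rs        argument parsing and dispatch only
├── service.rs     module declarations
└── service/
    ├── cli.rs     new: the clap Args / Commands definitions
    ├── import.rs  new: the body of the Import subcommand
    └── rest.rs    gains boot_http_server and the HTTP wiring from main.rs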
166 changes: 8 additions & 158 deletions packages/toy-blog/src/main.rs
@@ -8,58 +8,15 @@ mod extension;
mod service;
mod migration;

use std::fs::File;
use std::io::{BufReader, Read, stdin, Write};
use std::path::PathBuf;
use actix_web::{App, HttpServer};
use actix_web::middleware::Logger;

use actix_web::web::scope as prefixed_service;
use anyhow::{bail, Context as _, Result};
use actix_web_httpauth::extractors::bearer::Config as BearerAuthConfig;
use clap::{Parser, Subcommand};
use std::io::Read;

use anyhow::Result;
use clap::Parser;
use fern::colors::ColoredLevelConfig;
use log::{debug, info};
use serde_json::Value;
use service::rest::auth::WRITE_TOKEN;

use crate::service::rest::api::{article, meta};
use crate::service::rest::cors::middleware_factory as cors_middleware_factory;
use toy_blog_endpoint_model::{ArticleId, Visibility};
use crate::service::rest::api::list::{article_id_list, article_id_list_by_year, article_id_list_by_year_and_month};
use crate::service::cli::{Args, Commands};
use crate::service::rest::repository::GLOBAL_FILE;
use crate::service::persistence::ArticleRepository;

#[derive(Parser)]
struct Args {
#[clap(subcommand)]
subcommand: Commands
}

#[derive(Subcommand)]
enum Commands {
Run {
#[clap(long)]
http_port: u16,
#[clap(long)]
http_host: String,
#[clap(long = "cloudflare")]
cloudflare_support: bool,
/// DEPRECATED: this switch is a no-op and will be removed in the next major version.
#[clap(long)]
read_bearer_token_from_stdin: bool,
},
Import {
#[clap(long)]
file_path: PathBuf,
#[clap(long)]
article_id: ArticleId,
},
Version {
#[clap(long)]
plain: bool,
}
}

fn setup_logger() -> Result<()> {
let colors = ColoredLevelConfig::new();
@@ -91,117 +48,10 @@ async fn main() -> Result<()> {
cloudflare_support,
read_bearer_token_from_stdin: _
} => {
let bearer_token = {
let mut buf = String::new();
stdin().read_line(&mut buf).expect("failed to read from stdin");
buf.trim_end().to_string()
};

const PATH: &str = "data/article.json";

// migration

{
#[allow(unused_qualifications)]
let migrated_data = crate::migration::migrate_article_repr(
serde_json::from_reader::<_, Value>(File::open(PATH).expect("failed to read existing config"))
.expect("failed to deserialize config")
);

info!("migrated");

serde_json::to_writer(
File::options().write(true).truncate(true).open(PATH).expect("failed to write over existing config"),
&migrated_data
)
.expect("failed to serialize config");
}

GLOBAL_FILE.set(ArticleRepository::new(PATH).await).expect("unreachable!");

WRITE_TOKEN.set(bearer_token).unwrap();

let http_server = HttpServer::new(move || {
let logger_format = if cloudflare_support {
r#"%a (CF '%{CF-Connecting-IP}i') %t "%r" %s "%{Referer}i" "%{User-Agent}i" "#
} else {
r#"%a %t "%r" %s "%{Referer}i" "%{User-Agent}i" "#
};

App::new()
.service(prefixed_service("/api")
.service(
(
prefixed_service("/article")
.service(
(
article::create,
article::fetch,
article::update,
article::remove,
)
),
prefixed_service("/meta")
.service(meta::change_id),
prefixed_service("/list")
.service(article_id_list)
.service(article_id_list_by_year)
.service(article_id_list_by_year_and_month)
)
)
)
.app_data(
BearerAuthConfig::default()
.realm("Perform write operation")
.scope("article:write"),
)
.wrap(Logger::new(logger_format))
.wrap(cors_middleware_factory())
});

println!("running!");
http_server
.bind((http_host, http_port))?
.run()
.await
.context("while running server")?;

Ok(())
}
crate::service::rest::boot_http_server(http_port, &http_host, cloudflare_support).await
}
Commands::Import { file_path, article_id } => {
if !file_path.exists() {
bail!("You can not import non-existent file")
}

if !file_path.is_file() {
// TODO: /dev/stdin is not supported by this method
debug!("is_dir: {}", file_path.is_dir());
debug!("is_symlink: {}", file_path.is_symlink());
debug!("metadata: {:?}", file_path.metadata()?);
bail!("Non-file paths are not supported")
}

let content = {
let mut fd = BufReader::new(File::open(file_path)?);
let mut buf = vec![];
fd.read_to_end(&mut buf)?;
String::from_utf8(buf)
};

match content {
Ok(content) => {
GLOBAL_FILE.get().expect("must be fully-initialized").create_entry(&article_id, content, Visibility::Private).await?;
info!("Successfully imported as {article_id}.");
Ok(())
}
Err(err) => {
bail!("The file is not UTF-8: {err}\
Please review following list:\
- The file is not binary\
- The text is encoded with UTF-8\
Especially, importing Shift-JIS texts are NOT supported.")
}
}
crate::service::import::import(&file_path, &article_id).await
}
Commands::Version { plain } => {
const VERSION: &str = env!("CARGO_PKG_VERSION");
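
Putting the added lines together, main() is reduced to argument parsing and dispatch. A minimal reconstruction from the hunks above — the runtime attribute, the setup_logger() call, the exact parse site, and the body of the Version arm are not visible in the diff and are assumptions here:

use anyhow::Result;
use clap::Parser;

use crate::service::cli::{Args, Commands};

#[actix_web::main] // assumption: the attribute sits outside the visible hunk
async fn main() -> Result<()> {
    setup_logger()?; // assumption: retained from the unchanged part of main.rs
    match Args::parse().subcommand {
        Commands::Run { http_port, http_host, cloudflare_support, read_bearer_token_from_stdin: _ } => {
            crate::service::rest::boot_http_server(http_port, &http_host, cloudflare_support).await
        }
        Commands::Import { file_path, article_id } => {
            crate::service::import::import(&file_path, &article_id).await
        }
        Commands::Version { plain } => {
            const VERSION: &str = env!("CARGO_PKG_VERSION");
            // how `plain` affects the output is cut off by the hunk boundary
            let _ = (plain, VERSION);
            Ok(())
        }
    }
}

Keeping main() to dispatch only means the HTTP wiring and the import logic can now be read and reviewed per module, in the new files shown below.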
4 changes: 3 additions & 1 deletion packages/toy-blog/src/service.rs
@@ -1,4 +1,6 @@
pub mod rest;
pub(super) mod rest;
pub mod persistence;
#[cfg(feature = "unstable_activitypub")]
pub mod activitypub;
pub(super) mod cli;
pub(super) mod import;
34 changes: 34 additions & 0 deletions packages/toy-blog/src/service/cli.rs
@@ -0,0 +1,34 @@
use std::path::PathBuf;
use clap::{Parser, Subcommand};
use toy_blog_endpoint_model::ArticleId;

#[derive(Parser)]
pub struct Args {
    #[clap(subcommand)]
    pub subcommand: Commands
}

#[derive(Subcommand)]
pub enum Commands {
    Run {
        #[clap(long)]
        http_port: u16,
        #[clap(long)]
        http_host: String,
        #[clap(long = "cloudflare")]
        cloudflare_support: bool,
        /// DEPRECATED: this switch is a no-op and will be removed in the next major version.
        #[clap(long)]
        read_bearer_token_from_stdin: bool,
    },
    Import {
        #[clap(long)]
        file_path: PathBuf,
        #[clap(long)]
        article_id: ArticleId,
    },
    Version {
        #[clap(long)]
        plain: bool,
    }
}
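
These definitions move out of main.rs essentially verbatim, only gaining pub visibility. With clap's derive defaults, the snake_case field names surface as kebab-case long options (--http-port, --http-host, --file-path, --article-id), while --cloudflare is renamed explicitly. A small illustrative sketch of that mapping — the test module, its name, the argument values, and the use of try_parse_from are assumptions, not part of the commit:

#[cfg(test)]
mod flag_mapping_sketch {
    use clap::Parser;
    use super::{Args, Commands};

    #[test]
    fn run_flags_parse() {
        // `--http-port` / `--http-host` come from the field names above;
        // `--cloudflare` comes from `#[clap(long = "cloudflare")]`.
        let args = Args::try_parse_from([
            "toy-blog", "run",
            "--http-host", "127.0.0.1",
            "--http-port", "8080",
            "--cloudflare",
        ]).expect("flags should match the derive definitions above");

        assert!(matches!(args.subcommand, Commands::Run { http_port: 8080, cloudflare_support: true, .. }));
    }
}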
43 changes: 43 additions & 0 deletions packages/toy-blog/src/service/import.rs
@@ -0,0 +1,43 @@
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::Path;
use anyhow::bail;
use log::{debug, info};
use toy_blog_endpoint_model::{ArticleId, Visibility};
use crate::service::rest::repository::GLOBAL_FILE;

pub async fn import(file_path: &Path, article_id: &ArticleId) -> Result<(), anyhow::Error> {
    if !file_path.exists() {
        bail!("You cannot import a non-existent file")
    }

    if !file_path.is_file() {
        // TODO: /dev/stdin is not supported by this method
        debug!("is_dir: {}", file_path.is_dir());
        debug!("is_symlink: {}", file_path.is_symlink());
        debug!("metadata: {:?}", file_path.metadata()?);
        bail!("Non-file paths are not supported")
    }

    let content = {
        let mut fd = BufReader::new(File::open(file_path)?);
        let mut buf = vec![];
        fd.read_to_end(&mut buf)?;
        String::from_utf8(buf)
    };

    match content {
        Ok(content) => {
            GLOBAL_FILE.get().expect("must be fully-initialized").create_entry(article_id, content, Visibility::Private).await?;
            info!("Successfully imported as {article_id}.");
            Ok(())
        }
        Err(err) => {
            bail!("The file is not UTF-8: {err}\n\
                   Please check the following:\n\
                   - the file is not binary\n\
                   - the text is encoded as UTF-8\n\
                   In particular, importing Shift-JIS text is NOT supported.")
        }
    }
}
93 changes: 93 additions & 0 deletions packages/toy-blog/src/service/rest.rs
@@ -4,7 +4,21 @@ pub mod repository;
pub mod auth;
pub mod exposed_representation_format;

use std::fs::File;
use std::io::stdin;
use actix_web::{App, HttpServer};
use actix_web::middleware::Logger;
use anyhow::Context;
use log::info;
use serde_json::Value;
use inner_no_leak::ComposeInternalError;
use crate::service::persistence::ArticleRepository;
use crate::service::rest::api::{article, meta};
use crate::service::rest::api::list::{article_id_list, article_id_list_by_year, article_id_list_by_year_and_month};
use crate::service::rest::auth::WRITE_TOKEN;
use crate::service::rest::repository::GLOBAL_FILE;
use actix_web::web::scope as prefixed_service;
use actix_web_httpauth::extractors::bearer::Config as BearerAuthConfig;

mod inner_no_leak {
use std::error::Error;
@@ -22,3 +36,82 @@ }
}
}
}

pub async fn boot_http_server(port: u16, host: &str, proxied_by_cloudflare: bool) -> Result<(), anyhow::Error> {
    let bearer_token = {
        let mut buf = String::new();
        stdin().read_line(&mut buf).expect("failed to read from stdin");
        buf.trim_end().to_string()
    };

    const PATH: &str = "data/article.json";

    // migration

    {
        #[allow(unused_qualifications)]
        let migrated_data = crate::migration::migrate_article_repr(
            serde_json::from_reader::<_, Value>(File::open(PATH).expect("failed to read existing config"))
                .expect("failed to deserialize config")
        );

        info!("migrated");

        serde_json::to_writer(
            File::options().write(true).truncate(true).open(PATH).expect("failed to write over existing config"),
            &migrated_data
        )
            .expect("failed to serialize config");
    }

    GLOBAL_FILE.set(ArticleRepository::new(PATH).await).expect("unreachable!");

    WRITE_TOKEN.set(bearer_token).unwrap();

    let http_server = HttpServer::new(move || {
        let logger_format = if proxied_by_cloudflare {
            r#"%a (CF '%{CF-Connecting-IP}i') %t "%r" %s "%{Referer}i" "%{User-Agent}i" "#
        } else {
            r#"%a %t "%r" %s "%{Referer}i" "%{User-Agent}i" "#
        };

        App::new()
            .service(prefixed_service("/api")
                .service(
                    (
                        prefixed_service("/article")
                            .service(
                                (
                                    article::create,
                                    article::fetch,
                                    article::update,
                                    article::remove,
                                )
                            ),
                        prefixed_service("/meta")
                            .service(meta::change_id),
                        prefixed_service("/list")
                            .service(article_id_list)
                            .service(article_id_list_by_year)
                            .service(article_id_list_by_year_and_month)
                    )
                )
            )
            .app_data(
                BearerAuthConfig::default()
                    .realm("Perform write operation")
                    .scope("article:write"),
            )
            .wrap(Logger::new(logger_format))
            .wrap(crate::service::rest::cors::middleware_factory())
    });

    println!("running!");
    http_server
        .bind((host, port))?
        .run()
        .await
        .context("while running server")?;

    Ok(())
}
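
Because boot_http_server reads the write token from stdin before it binds (behaviour unchanged, merely relocated from main.rs), launching the server means piping the token into the Run subcommand. Roughly, assuming the binary is called toy-blog and the token is held in an environment variable:

printf '%s\n' "$WRITE_TOKEN" | toy-blog run --http-host 127.0.0.1 --http-port 8080

The process also expects data/article.json to exist already, since the migration step opens it unconditionally.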
