Use structopt.

Code becomes a bit simpler than with raw clap.
Rasmus Kaj 2019-05-16 22:26:10 +02:00
parent f76942bfce
commit 43a259e658
10 changed files with 393 additions and 343 deletions
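
Not part of the diff below: for readers who have not used structopt, here is a minimal, hypothetical sketch of the derive pattern this commit switches to (the option names are illustrative only, not from this commit). Instead of building a clap::App by hand, the options become a struct and structopt generates the parser:

use structopt::StructOpt;

/// Hypothetical example options, not taken from this commit.
#[derive(StructOpt)]
#[structopt(rename_all = "kebab-case")]
struct Opt {
    /// How to connect to the postgres database.
    #[structopt(long, env = "DATABASE_URL")]
    db_url: String,
    /// Max time (in seconds) to work.
    #[structopt(long, default_value = "10")]
    max_time: u64,
}

fn main() {
    // Parses argv, prints --help, and exits on parse errors, as clap does.
    let opt = Opt::from_args();
    println!("db: {}, max-time: {}s", opt.db_url, opt.max_time);
}

The same attributes (flatten, env, default_value, rename_all) appear throughout the diff below.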

Cargo.toml

@@ -12,7 +12,6 @@ ructe = { version = "^0.6.2", features = ["sass", "mime03"] }
 [dependencies]
 brotli2 = "*"
 chrono = "~0.4.0" # Must match version used by diesel
-clap = { version = "^2.19", features = [ "color", "wrap_help" ] }
 diesel = { version = "1.4.0", features = ["r2d2", "chrono", "postgres"] }
 djangohashers = "*"
 dotenv = "0.14.0"
@@ -32,5 +31,6 @@ rust-crypto = "0.2.36"
 serde = { version = "1.0.0", features = ["derive"] }
 serde_json = "1.0"
 slug = "0.1"
+structopt = { version = "0.2.14", features = ["wrap_help"] }
 time = "*"
 warp = "0.1.6"

src/adm/findphotos.rs

@@ -2,6 +2,7 @@ use super::result::Error;
 use crate::models::{Camera, Modification, Photo};
 use crate::myexif::ExifData;
 use crate::photosdir::PhotosDir;
+use crate::{DbOpt, DirOpt};
 use diesel::insert_into;
 use diesel::pg::PgConnection;
 use diesel::prelude::*;
@@ -9,8 +10,41 @@ use image::GenericImageView;
 use log::{debug, info, warn};
 use std::path::Path;
 use std::time::Instant;
+use structopt::StructOpt;
 
-pub fn crawl(
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+pub struct Findphotos {
+    #[structopt(flatten)]
+    db: DbOpt,
+    #[structopt(flatten)]
+    photos: DirOpt,
+    /// Base directory to search in (relative to the image root).
+    #[structopt()]
+    base: Vec<String>,
+}
+
+impl Findphotos {
+    pub fn run(&self) -> Result<(), Error> {
+        let pd = PhotosDir::new(&self.photos.photos_dir);
+        let db = self.db.connect()?;
+        if !self.base.is_empty() {
+            for base in &self.base {
+                crawl(&db, &pd, Path::new(base)).map_err(|e| {
+                    Error::Other(format!("Failed to crawl {}: {}", base, e))
+                })?;
+            }
+        } else {
+            crawl(&db, &pd, Path::new("")).map_err(|e| {
+                Error::Other(format!("Failed to crawl: {}", e))
+            })?;
+        }
+        Ok(())
+    }
+}
+
+fn crawl(
     db: &PgConnection,
     photos: &PhotosDir,
     only_in: &Path,
@@ -25,33 +59,45 @@ pub fn crawl(
     Ok(())
 }
 
-pub fn find_sizes(db: &PgConnection, pd: &PhotosDir) -> Result<(), Error> {
-    use crate::schema::photos::dsl as p;
-    let start = Instant::now();
-    let mut c = 0;
-    while start.elapsed().as_secs() < 5 {
-        let photos = Photo::query(true)
-            .filter(p::width.is_null())
-            .filter(p::height.is_null())
-            .order((p::is_public.desc(), p::date.desc().nulls_last()))
-            .limit(10)
-            .load::<Photo>(db)?;
-        if photos.is_empty() {
-            break;
-        } else {
-            c += photos.len();
-        }
-
-        for photo in photos {
-            let path = pd.get_raw_path(&photo);
-            let (width, height) =
-                match ExifData::read_from(&path).and_then(|exif| {
-                    Ok((
-                        exif.width.ok_or(Error::MissingWidth)?,
-                        exif.height.ok_or(Error::MissingHeight)?,
-                    ))
-                }) {
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+pub struct FindSizes {
+    #[structopt(flatten)]
+    db: DbOpt,
+    #[structopt(flatten)]
+    photos: DirOpt,
+}
+
+impl FindSizes {
+    pub fn run(&self) -> Result<(), Error> {
+        let db = self.db.connect()?;
+        let pd = PhotosDir::new(&self.photos.photos_dir);
+        use crate::schema::photos::dsl as p;
+        let start = Instant::now();
+        let mut c = 0;
+        while start.elapsed().as_secs() < 5 {
+            let photos = Photo::query(true)
+                .filter(p::width.is_null())
+                .filter(p::height.is_null())
+                .order((p::is_public.desc(), p::date.desc().nulls_last()))
+                .limit(10)
+                .load::<Photo>(&db)?;
+
+            if photos.is_empty() {
+                break;
+            } else {
+                c += photos.len();
+            }
+
+            for photo in photos {
+                let path = pd.get_raw_path(&photo);
+                let (width, height) = match ExifData::read_from(&path)
+                    .and_then(|exif| {
+                        Ok((
+                            exif.width.ok_or(Error::MissingWidth)?,
+                            exif.height.ok_or(Error::MissingHeight)?,
+                        ))
+                    }) {
                     Ok((width, height)) => (width, height),
                     Err(e) => {
                         info!(
@@ -69,21 +115,25 @@ pub fn find_sizes(db: &PgConnection, pd: &PhotosDir) -> Result<(), Error> {
                     (image.width(), image.height())
                 }
             };
             diesel::update(p::photos.find(photo.id))
-                .set((p::width.eq(width as i32), p::height.eq(height as i32)))
-                .execute(db)?;
+                .set((
+                    p::width.eq(width as i32),
+                    p::height.eq(height as i32),
+                ))
+                .execute(&db)?;
             debug!("Store img #{} size {} x {}", photo.id, width, height);
         }
     }
+
     let e = start.elapsed();
     info!(
         "Found size of {} images in {}.{:03} s",
         c,
         e.as_secs(),
         e.subsec_millis(),
     );
     Ok(())
+    }
 }
 
 fn save_photo(

src/adm/makepublic.rs

@@ -1,10 +1,48 @@
 use super::result::Error;
 use crate::models::Photo;
+use crate::DbOpt;
 use diesel::pg::PgConnection;
 use diesel::prelude::*;
 use diesel::result::Error as DieselError;
 use diesel::update;
+use std::fs::File;
 use std::io::prelude::*;
+use std::io::{self, BufReader};
+use structopt::clap::ArgGroup;
+use structopt::StructOpt;
+
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+#[structopt(raw(group = "ArgGroup::with_name(\"spec\").required(true)"))]
+pub struct Makepublic {
+    #[structopt(flatten)]
+    db: DbOpt,
+    /// Image path to make public
+    #[structopt(group = "spec")]
+    image: Option<String>,
+    /// File listing image paths to make public
+    #[structopt(long, short, group = "spec")]
+    list: Option<String>,
+}
+
+impl Makepublic {
+    pub fn run(&self) -> Result<(), Error> {
+        let db = self.db.connect()?;
+        match (self.list.as_ref().map(AsRef::as_ref), &self.image) {
+            (Some("-"), None) => {
+                let list = io::stdin();
+                by_file_list(&db, list.lock())?;
+                Ok(())
+            }
+            (Some(list), None) => {
+                let list = BufReader::new(File::open(list)?);
+                by_file_list(&db, list)
+            }
+            (None, Some(image)) => one(&db, image),
+            _ => Err(Error::Other("bad command".to_string())),
+        }
+    }
+}
 
 pub fn one(db: &PgConnection, tpath: &str) -> Result<(), Error> {
     use crate::schema::photos::dsl::*;

src/adm/precache.rs

@@ -3,64 +3,79 @@ use crate::models::Photo;
 use crate::photosdir::PhotosDir;
 use crate::schema::photos::dsl::{date, is_public};
 use crate::server::SizeTag;
-use diesel::pg::PgConnection;
+use crate::{CacheOpt, DbOpt, DirOpt};
 use diesel::prelude::*;
 use log::{debug, info};
 use r2d2_memcache::memcache::Client;
 use std::time::{Duration, Instant};
+use structopt::StructOpt;
 
-/// Make sure all photos are stored in the cache.
-///
-/// The work are intentionally handled sequentially, to not
-/// overwhelm the host while precaching.
-/// The images are handled in public first, new first order, to have
-/// the probably most requested images precached as soon as possible.
-pub fn precache(
-    db: &PgConnection,
-    pd: &PhotosDir,
-    max_secs: u64,
-) -> Result<(), Error> {
-    let max_time = Duration::from_secs(max_secs);
-    let timer = Instant::now();
-    let mut cache = Client::connect("memcache://127.0.0.1:11211")?;
-    let size = SizeTag::Small;
-    let (mut n, mut n_stored) = (0, 0);
-    let photos = Photo::query(true)
-        .order((is_public.desc(), date.desc().nulls_last()))
-        .load::<Photo>(db)?;
-    let no_expire = 0;
-    for photo in photos {
-        n += 1;
-        let key = &photo.cache_key(size);
-        if cache.get::<Vec<u8>>(key)?.is_none() {
-            let size = size.px();
-            let data = pd.scale_image(&photo, size, size).map_err(|e| {
-                Error::Other(format!(
-                    "Failed to scale #{} ({}): {}",
-                    photo.id, photo.path, e,
-                ))
-            })?;
-            cache.set(key, &data[..], no_expire)?;
-            debug!("Cache: stored {} for {}", key, photo.path);
-            n_stored += 1;
-            if timer.elapsed() > max_time {
-                break;
-            }
-            if n_stored % 64 == 0 {
-                info!(
-                    "Checked {} images in cache, added {}, in {:.1?}.",
-                    n,
-                    n_stored,
-                    timer.elapsed()
-                );
-            }
-        }
-    }
-    info!(
-        "Checked {} images in cache, added {}, in {:.1?}.",
-        n,
-        n_stored,
-        timer.elapsed()
-    );
-    Ok(())
-}
+#[derive(StructOpt)]
+pub struct Args {
+    #[structopt(flatten)]
+    cache: CacheOpt,
+    #[structopt(flatten)]
+    db: DbOpt,
+    #[structopt(flatten)]
+    photos: DirOpt,
+    /// Max time (in seconds) to work.
+    #[structopt(default_value = "10")]
+    max_time: u64,
+}
+
+impl Args {
+    /// Make sure all photos are stored in the cache.
+    ///
+    /// The work are intentionally handled sequentially, to not
+    /// overwhelm the host while precaching.
+    /// The images are handled in public first, new first order, to have
+    /// the probably most requested images precached as soon as possible.
+    pub fn run(&self) -> Result<(), Error> {
+        let max_time = Duration::from_secs(self.max_time);
+        let timer = Instant::now();
+        let mut cache = Client::connect(self.cache.memcached_url.as_ref())?;
+        let size = SizeTag::Small;
+        let (mut n, mut n_stored) = (0, 0);
+        let photos = Photo::query(true)
+            .order((is_public.desc(), date.desc().nulls_last()))
+            .load::<Photo>(&self.db.connect()?)?;
+        let no_expire = 0;
+        let pd = PhotosDir::new(&self.photos.photos_dir);
+        for photo in photos {
+            n += 1;
+            let key = &photo.cache_key(size);
+            if cache.get::<Vec<u8>>(key)?.is_none() {
+                let size = size.px();
+                let data =
+                    pd.scale_image(&photo, size, size).map_err(|e| {
+                        Error::Other(format!(
+                            "Failed to scale #{} ({}): {}",
+                            photo.id, photo.path, e,
+                        ))
+                    })?;
+                cache.set(key, &data[..], no_expire)?;
+                debug!("Cache: stored {} for {}", key, photo.path);
+                n_stored += 1;
+                if timer.elapsed() > max_time {
+                    break;
+                }
+                if n_stored % 64 == 0 {
+                    info!(
+                        "Checked {} images in cache, added {}, in {:.1?}.",
+                        n,
+                        n_stored,
+                        timer.elapsed()
+                    );
+                }
+            }
+        }
+        info!(
+            "Checked {} images in cache, added {}, in {:.1?}.",
+            n,
+            n_stored,
+            timer.elapsed()
+        );
+        Ok(())
+    }
+}

src/env.rs (deleted)

@@ -1,32 +0,0 @@
-use std::env::var;
-use std::path::PathBuf;
-use std::process::exit;
-
-pub fn dburl() -> String {
-    require_var("DATABASE_URL", "Database url")
-}
-
-#[allow(dead_code)]
-pub fn jwt_key() -> String {
-    require_var("JWT_KEY", "Signing key for jwt")
-}
-
-pub fn require_var(name: &str, desc: &str) -> String {
-    match var(name) {
-        Ok(result) => result,
-        Err(error) => {
-            println!("{} needed in {}: {}", desc, name, error);
-            exit(1);
-        }
-    }
-}
-
-#[allow(dead_code)]
-pub fn photos_dir() -> PathBuf {
-    PathBuf::from(&*env_or("RPHOTOS_DIR", "/home/kaj/Bilder/foto"))
-}
-
-#[allow(dead_code)]
-pub fn env_or(name: &str, default: &str) -> String {
-    var(name).unwrap_or_else(|_err| default.to_string())
-}

src/fetch_places.rs

@@ -1,4 +1,5 @@
 use crate::models::{Coord, Place};
+use crate::DbOpt;
 use diesel;
 use diesel::prelude::*;
 use diesel::result::{DatabaseErrorKind, Error as DieselError};
@@ -6,6 +7,50 @@ use log::{debug, info, warn};
 use reqwest::{self, Client, Response};
 use serde_json::Value;
 use slug::slugify;
+use structopt::StructOpt;
+
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+pub struct Fetchplaces {
+    #[structopt(flatten)]
+    db: DbOpt,
+    /// Max number of photos to use for --auto
+    #[structopt(long, short, default_value = "5")]
+    limit: i64,
+    /// Fetch data for photos with position but lacking places.
+    #[structopt(long, short)]
+    auto: bool,
+    /// Image ids to fetch place data for
+    photos: Vec<i32>,
+}
+
+impl Fetchplaces {
+    pub fn run(&self) -> Result<(), super::adm::result::Error> {
+        let db = self.db.connect()?;
+        if self.auto {
+            println!("Should find {} photos to fetch places for", self.limit);
+            use crate::schema::photo_places::dsl as place;
+            use crate::schema::positions::dsl as pos;
+            let result = pos::positions
+                .select((pos::photo_id, (pos::latitude, pos::longitude)))
+                .filter(pos::photo_id.ne_all(
+                    place::photo_places.select(place::photo_id).distinct(),
+                ))
+                .order(pos::photo_id.desc())
+                .limit(self.limit)
+                .load::<(i32, Coord)>(&db)?;
+            for (photo_id, coord) in result {
+                println!("Find places for #{}, {:?}", photo_id, coord);
+                update_image_places(&db, photo_id)?;
+            }
+        } else {
+            for photo in &self.photos {
+                update_image_places(&db, *photo)?;
+            }
+        }
+        Ok(())
+    }
+}
 
 pub fn update_image_places(c: &PgConnection, image: i32) -> Result<(), Error> {
     use crate::schema::positions::dsl::*;

src/main.rs

@@ -4,7 +4,6 @@
 extern crate diesel;
 
 mod adm;
-mod env;
 mod fetch_places;
 mod models;
 mod myexif;
@@ -16,121 +15,95 @@ mod server;
 use crate::adm::result::Error;
 use crate::adm::stats::show_stats;
 use crate::adm::{findphotos, makepublic, precache, storestatics, users};
-use crate::env::{dburl, photos_dir};
-use crate::models::Coord;
-use crate::photosdir::PhotosDir;
-use clap::{App, Arg, ArgMatches, SubCommand};
 use diesel::pg::PgConnection;
 use diesel::prelude::*;
 use dotenv::dotenv;
-use std::fs::File;
-use std::io::{self, BufReader};
-use std::path::Path;
+use std::path::PathBuf;
 use std::process::exit;
+use structopt::StructOpt;
+
+/// Command line interface for rphotos.
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+enum RPhotos {
+    /// Make specific image(s) public.
+    ///
+    /// The image path(s) are relative to the image root.
+    Makepublic(makepublic::Makepublic),
+    /// Get place tags for photos by looking up coordinates in OSM
+    Fetchplaces(fetch_places::Fetchplaces),
+    /// Find new photos in the photo directory
+    Findphotos(findphotos::Findphotos),
+    /// Find sizes of images lacking that info in db
+    FindSizes(findphotos::FindSizes),
+    /// Make sure the photos has thumbnails stored in cache.
+    ///
+    /// The time limit is checked after each stored image, so the
+    /// command will complete in slightly more than the max time and
+    /// one image will be processed even if the max time is zero.
+    Precache(precache::Args),
+    /// Show some statistics from the database
+    Stats(DbOpt),
+    /// Store statics as files for a web server
+    Storestatics {
+        /// Directory to store the files in
+        dir: String,
+    },
+    /// List existing users
+    Userlist {
+        #[structopt(flatten)]
+        db: DbOpt,
+    },
+    /// Set password for a (new or existing) user
+    Userpass {
+        #[structopt(flatten)]
+        db: DbOpt,
+        /// Username to set password for
+        // TODO: Use a special type that only accepts nice user names.
+        user: String,
+    },
+    /// Run the rphotos web server.
+    Runserver(server::Args),
+}
+
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+struct CacheOpt {
+    /// How to connect to memcached.
+    #[structopt(
+        long,
+        env = "MEMCACHED_SERVER",
+        default_value = "memcache://127.0.0.1:11211"
+    )]
+    memcached_url: String,
+}
+
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+struct DbOpt {
+    /// How to connect to the postgres database.
+    #[structopt(long, env = "DATABASE_URL", hide_env_values = true)]
+    db_url: String,
+}
+
+impl DbOpt {
+    fn connect(&self) -> Result<PgConnection, ConnectionError> {
+        PgConnection::establish(&self.db_url)
+    }
+}
+
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+struct DirOpt {
+    /// Path to the root directory storing all actual photos.
+    #[structopt(long, env = "RPHOTOS_DIR")]
+    photos_dir: PathBuf,
+}
 
 fn main() {
     dotenv().ok();
     env_logger::init();
-    let args = App::new("rphotos")
-        .version(env!("CARGO_PKG_VERSION"))
-        .about("Command line interface for rphotos")
-        .subcommand(
-            SubCommand::with_name("findphotos")
-                .about("Find new photos in the photo directory")
-                .arg(Arg::with_name("BASE").multiple(true).help(
-                    "Base directory to search in (relative to the \
-                     image root).",
-                )),
-        ).subcommand(
-            SubCommand::with_name("find-sizes")
-                .about("Find sizes of images lacking that info in db"),
-        ).subcommand(
-            SubCommand::with_name("stats")
-                .about("Show some statistics from the database"),
-        ).subcommand(
-            SubCommand::with_name("userlist").about("List existing users"),
-        ).subcommand(
-            SubCommand::with_name("userpass")
-                .about("Set password for a (new or existing) user")
-                .arg(
-                    Arg::with_name("USER")
-                        .required(true)
-                        .help("Username to set password for"),
-                ),
-        ).subcommand(
-            SubCommand::with_name("fetchplaces")
-                .about("Get place tags for photos by looking up coordinates in OSM")
-                .arg(
-                    Arg::with_name("LIMIT")
-                        .long("limit")
-                        .short("l")
-                        .takes_value(true)
-                        .default_value("5")
-                        .help("Max number of photos to use for --auto")
-                ).arg(
-                    Arg::with_name("AUTO")
-                        .long("auto")
-                        .help("Fetch data for photos with position but \
-                               lacking places.")
-                ).arg(
-                    Arg::with_name("PHOTOS")
-                        .required_unless("AUTO").multiple(true)
-                        .help("Image ids to fetch place data for"),
-                ),
-        ).subcommand(
-            SubCommand::with_name("makepublic")
-                .about("make specific image(s) public")
-                .arg(
-                    Arg::with_name("LIST")
-                        .long("list")
-                        .short("l")
-                        .takes_value(true)
-                        .help("File listing image paths to make public"),
-                ).arg(
-                    Arg::with_name("IMAGE")
-                        .required_unless("LIST")
-                        .help("Image path to make public"),
-                ).after_help(
-                    "The image path(s) are relative to the image root.",
-                ),
-        ).subcommand(
-            SubCommand::with_name("precache")
-                .about("Make sure the photos has thumbnails stored in cache.")
-                .arg(
-                    Arg::with_name("MAXTIME")
-                        .long("max-time")
-                        .default_value("10")
-                        .help("Max time (in seconds) to work")
-                ).after_help(
-                    "The time limit is checked after each stored image, \
-                     so the command will complete in slightly more than \
-                     the max time and one image will be processed even \
-                     if the max time is zero."
-                )
-        ).subcommand(
-            SubCommand::with_name("storestatics")
-                .about("Store statics as files for a web server")
-                .arg(
-                    Arg::with_name("DIR")
-                        .required(true)
-                        .help("Directory to store the files in"),
-                ),
-        ).subcommand(
-            SubCommand::with_name("runserver")
-                .arg(
-                    Arg::with_name("PIDFILE")
-                        .long("pidfile")
-                        .takes_value(true)
-                        .help(
-                            "Write (and read, if --replace) a pid file with \
-                             the name given as <PIDFILE>.",
-                        ),
-                ).arg(Arg::with_name("REPLACE").long("replace").help(
-                    "Kill old server (identified by pid file) before running",
-                )),
-        ).get_matches();
-    match run(&args) {
+    match run(&RPhotos::from_args()) {
         Ok(()) => (),
         Err(err) => {
             println!("{}", err);
@@ -139,97 +112,19 @@ fn main() {
     }
 }
 
-fn run(args: &ArgMatches) -> Result<(), Error> {
-    match args.subcommand() {
-        ("findphotos", Some(args)) => {
-            let pd = PhotosDir::new(photos_dir());
-            let db = get_db()?;
-            if let Some(bases) = args.values_of("BASE") {
-                for base in bases {
-                    findphotos::crawl(&db, &pd, Path::new(&base)).map_err(
-                        |e| {
-                            Error::Other(format!(
-                                "Failed to crawl {}: {}",
-                                base, e,
-                            ))
-                        },
-                    )?;
-                }
-            } else {
-                findphotos::crawl(&db, &pd, Path::new("")).map_err(|e| {
-                    Error::Other(format!("Failed to crawl: {}", e))
-                })?;
-            }
-            Ok(())
-        }
-        ("find-sizes", Some(_args)) => {
-            findphotos::find_sizes(&get_db()?, &PhotosDir::new(photos_dir()))
-        }
-        ("makepublic", Some(args)) => {
-            let db = get_db()?;
-            match args.value_of("LIST") {
-                Some("-") => {
-                    let list = io::stdin();
-                    makepublic::by_file_list(&db, list.lock())?;
-                    Ok(())
-                }
-                Some(f) => {
-                    let list = File::open(f)?;
-                    let list = BufReader::new(list);
-                    makepublic::by_file_list(&db, list)
-                }
-                None => makepublic::one(&db, args.value_of("IMAGE").unwrap()),
-            }
-        }
-        ("stats", Some(_args)) => show_stats(&get_db()?),
-        ("userlist", Some(_args)) => users::list(&get_db()?),
-        ("fetchplaces", Some(args)) => {
-            let db = get_db()?;
-            if args.is_present("AUTO") {
-                let limit = args.value_of("LIMIT").unwrap().parse()?;
-                println!("Should find {} photos to fetch places for", limit);
-                use crate::schema::photo_places::dsl as place;
-                use crate::schema::positions::dsl as pos;
-                let result = pos::positions
-                    .select((pos::photo_id, (pos::latitude, pos::longitude)))
-                    .filter(pos::photo_id.ne_all(
-                        place::photo_places.select(place::photo_id).distinct(),
-                    ))
-                    .order(pos::photo_id.desc())
-                    .limit(limit)
-                    .load::<(i32, Coord)>(&db)?;
-                for (photo_id, coord) in result {
-                    println!("Find places for #{}, {:?}", photo_id, coord);
-                    fetch_places::update_image_places(&db, photo_id)?;
-                }
-            } else {
-                for photo in args.values_of("PHOTOS").unwrap() {
-                    fetch_places::update_image_places(&db, photo.parse()?)?;
-                }
-            }
-            Ok(())
-        }
-        ("userpass", Some(args)) => {
-            users::passwd(&get_db()?, args.value_of("USER").unwrap())
-        }
-        ("precache", Some(args)) => precache::precache(
-            &get_db()?,
-            &PhotosDir::new(photos_dir()),
-            args.value_of("MAXTIME").unwrap().parse()?,
-        ),
-        ("storestatics", Some(args)) => {
-            storestatics::to_dir(args.value_of("DIR").unwrap())
-        }
-        ("runserver", Some(args)) => server::run(args),
-        _ => {
-            println!("No subcommand given.\n\n{}", args.usage());
-            Ok(())
-        }
+fn run(args: &RPhotos) -> Result<(), Error> {
+    match args {
+        RPhotos::Findphotos(cmd) => cmd.run(),
+        RPhotos::FindSizes(cmd) => cmd.run(),
+        RPhotos::Makepublic(cmd) => cmd.run(),
+        RPhotos::Stats(db) => show_stats(&db.connect()?),
+        RPhotos::Userlist { db } => users::list(&db.connect()?),
+        RPhotos::Userpass { db, user } => users::passwd(&db.connect()?, user),
+        RPhotos::Fetchplaces(cmd) => cmd.run(),
+        RPhotos::Precache(cmd) => cmd.run(),
+        RPhotos::Storestatics { dir } => storestatics::to_dir(dir),
+        RPhotos::Runserver(ra) => server::run(ra),
     }
 }
 
-fn get_db() -> Result<PgConnection, ConnectionError> {
-    PgConnection::establish(&dburl())
-}
-
 include!(concat!(env!("OUT_DIR"), "/templates.rs"));

src/photosdir.rs

@@ -11,8 +11,10 @@ pub struct PhotosDir {
 }
 
 impl PhotosDir {
-    pub fn new(basedir: PathBuf) -> Self {
-        PhotosDir { basedir }
+    pub fn new(basedir: &Path) -> Self {
+        PhotosDir {
+            basedir: basedir.into(),
+        }
     }
 
     #[allow(dead_code)]

src/server/context.rs

@@ -1,4 +1,3 @@
-use crate::env::photos_dir;
 use crate::photosdir::PhotosDir;
 use crypto::sha2::Sha256;
 use diesel::pg::PgConnection;
@@ -8,6 +7,7 @@ use log::{debug, error, warn};
 use r2d2_memcache::r2d2::Error;
 use r2d2_memcache::MemcacheConnectionManager;
 use std::collections::BTreeMap;
+use std::path::Path;
 use std::sync::Arc;
 use std::time::Duration;
 use warp::filters::{cookie, BoxedFilter};
@@ -23,10 +23,15 @@ type PooledMemcache = PooledConnection<MemcacheConnectionManager>;
 pub fn create_session_filter(
     db_url: &str,
     memcache_server: &str,
-    jwt_secret: String,
+    photos_dir: &Path,
+    jwt_secret: &str,
 ) -> BoxedFilter<(Context,)> {
-    let global =
-        Arc::new(GlobalContext::new(db_url, memcache_server, jwt_secret));
+    let global = Arc::new(GlobalContext::new(
+        db_url,
+        memcache_server,
+        photos_dir,
+        jwt_secret,
+    ));
     warp::any()
         .and(path::full())
         .and(cookie::optional("EXAUTH"))
@@ -53,7 +58,12 @@ struct GlobalContext {
 }
 
 impl GlobalContext {
-    fn new(db_url: &str, memcache_server: &str, jwt_secret: String) -> Self {
+    fn new(
+        db_url: &str,
+        memcache_server: &str,
+        photos_dir: &Path,
+        jwt_secret: &str,
+    ) -> Self {
         let db_manager = ConnectionManager::<PgConnection>::new(db_url);
         let mc_manager = MemcacheConnectionManager::new(memcache_server);
         GlobalContext {
@@ -61,12 +71,12 @@ impl GlobalContext {
                 .connection_timeout(Duration::from_secs(1))
                 .build(db_manager)
                 .expect("Posgresql pool"),
-            photosdir: PhotosDir::new(photos_dir()),
+            photosdir: PhotosDir::new(photos_dir),
             memcache_pool: Pool::builder()
                 .connection_timeout(Duration::from_secs(1))
                 .build(mc_manager)
                 .expect("Memcache pool"),
-            jwt_secret,
+            jwt_secret: jwt_secret.into(),
         }
     }

src/server/mod.rs

@@ -10,13 +10,12 @@ pub use self::context::Context;
 use self::render_ructe::RenderRucte;
 use self::splitlist::*;
 use self::views_by_date::*;
+use super::{CacheOpt, DbOpt, DirOpt};
 use crate::adm::result::Error;
-use crate::env::{dburl, env_or, jwt_key};
 use crate::models::{Person, Photo, Place, Tag};
 use crate::pidfiles::handle_pid_file;
 use crate::templates::{self, Html};
 use chrono::Datelike;
-use clap::ArgMatches;
 use diesel::prelude::*;
 use djangohashers;
 use image;
@@ -24,10 +23,40 @@ use log::info;
 use mime;
 use serde::Deserialize;
 use std::net::SocketAddr;
+use structopt::StructOpt;
 use warp::filters::path::Tail;
 use warp::http::{header, Response, StatusCode};
 use warp::{self, reply, Filter, Rejection, Reply};
+
+#[derive(StructOpt)]
+#[structopt(rename_all = "kebab-case")]
+pub struct Args {
+    #[structopt(flatten)]
+    db: DbOpt,
+    #[structopt(flatten)]
+    cache: CacheOpt,
+    #[structopt(flatten)]
+    photos: DirOpt,
+    /// Write (and read, if --replace) a pid file with the name
+    /// given as <PIDFILE>.
+    #[structopt(long)]
+    pidfile: Option<String>,
+    /// Kill old server (identified by pid file) before running.
+    #[structopt(long, short)]
+    replace: bool,
+    /// Socket addess for rphotos to listen on.
+    #[structopt(
+        long,
+        env = "RPHOTOS_LISTEN",
+        default_value = "127.0.0.1:6767"
+    )]
+    listen: SocketAddr,
+    /// Signing key for jwt
+    #[structopt(long, env = "JWT_KEY", hide_env_values = true)]
+    jwt_key: String,
+}
 
 pub struct PhotoLink {
     pub title: Option<String>,
     pub href: String,
@@ -147,14 +176,15 @@ impl PhotoLink {
     }
 }
 
-pub fn run(args: &ArgMatches) -> Result<(), Error> {
-    if let Some(pidfile) = args.value_of("PIDFILE") {
-        handle_pid_file(pidfile, args.is_present("REPLACE")).unwrap()
+pub fn run(args: &Args) -> Result<(), Error> {
+    if let Some(pidfile) = &args.pidfile {
+        handle_pid_file(&pidfile, args.replace).unwrap()
     }
     let session_filter = create_session_filter(
-        &dburl(),
-        &env_or("MEMCACHED_SERVER", "memcache://127.0.0.1:11211"),
-        jwt_key(),
+        &args.db.db_url,
+        &args.cache.memcached_url,
+        &args.photos.photos_dir,
+        &args.jwt_key,
     );
     let s = move || session_filter.clone();
     use warp::filters::query::query;
@@ -190,10 +220,7 @@ pub fn run(args: &ArgMatches) -> Result<(), Error> {
         .or(get().and(path("ac")).and(path("tag")).and(s()).and(query()).map(auto_complete_tag))
         .or(get().and(path("ac")).and(path("person")).and(s()).and(query()).map(auto_complete_person))
         .or(path("adm").and(admin::routes(s())));
-    let addr = env_or("RPHOTOS_LISTEN", "127.0.0.1:6767")
-        .parse::<SocketAddr>()
-        .map_err(|e| Error::Other(format!("{}", e)))?;
-    warp::serve(routes.recover(customize_error)).run(addr);
+    warp::serve(routes.recover(customize_error)).run(args.listen);
     Ok(())
 }