commit 9449d992c0
Author: Johannes Heuel
Date:   2022-06-25 17:16:13 +02:00

33 changed files with 5100 additions and 0 deletions

backend/Cargo.lock (generated, Normal file, 1396 lines): diff suppressed because it is too large.

backend/Cargo.toml (Normal file, 23 lines)

@@ -0,0 +1,23 @@
[package]
name = "backend"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = " "
default-run = "backend"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
actix-web = "4"
actix-web-lab = "^0"
actix-files = "0.6"
env_logger = "0.9.0"
log = "0.4"
diesel = { version = "1.4.8", features = ["postgres", "r2d2"] }
diesel_migrations = "1.4"
dotenv = "0.15.0"
walkdir = "2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
common = { path = "../common" }
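
The manifest pulls in a sibling `common` crate by path; that crate is not part of this commit. Judging from how `get_pictures` in `backend/src/main.rs` maps `Picture` rows into `common::OutputPicture`, the shared type presumably looks roughly like this minimal sketch (only the three field names come from that mapping; the `u32` width and the derives are assumptions):

```rust
// Hypothetical sketch of the shared type in ../common (not included in this commit).
// Field names follow the mapping in backend/src/main.rs; u32 and the derives are guesses.
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutputPicture {
    pub thumbnail: Option<String>, // path of the generated thumbnail, if any
    pub width: u32,
    pub height: u32,
}
```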

backend/diesel.toml (Normal file, 5 lines)

@@ -0,0 +1,5 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"


@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit; any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();


@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit; any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;


@@ -0,0 +1 @@
DROP TABLE pictures


@@ -0,0 +1,18 @@
CREATE TABLE pictures (
id SERIAL PRIMARY KEY,
filepath VARCHAR NOT NULL,
created_at INTEGER,
focal_length VARCHAR,
shutter_speed VARCHAR,
width INTEGER NOT NULL,
height INTEGER NOT NULL,
make VARCHAR,
model VARCHAR,
lens VARCHAR,
orientation VARCHAR,
fnumber FLOAT,
iso INTEGER,
exposure_program VARCHAR,
exposure_compensation VARCHAR,
thumbnail VARCHAR
)

backend/src/actions.rs (Normal file, 16 lines)

@@ -0,0 +1,16 @@
extern crate diesel;
use backend::models::*;
use diesel::prelude::*;
use backend::*;
type DbError = Box<dyn std::error::Error + Send + Sync>;
/// Load the 50 most recently taken pictures, newest first (ordered by `created_at`).
pub fn list_pictures(conn: &PgConnection) -> Result<Vec<Picture>, DbError> {
use self::schema::pictures::dsl::*;
Ok(pictures
.limit(50)
.order_by(created_at.desc())
.load::<Picture>(conn)?)
}
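
`list_pictures` is a synchronous Diesel query, and the `get_pictures` handler in `main.rs` calls it directly on a pooled connection inside an async function. With actix-web 4 and a blocking ORM, one common alternative is to push the query onto the blocking thread pool with `web::block`. The following is a minimal sketch of that pattern, reusing `actions::list_pictures`, the `Picture` model, and the r2d2 pool from this commit; the error mapping and returning raw `Picture` rows (instead of the `OutputPicture` mapping) are simplifications for illustration:

```rust
// Sketch: run the blocking Diesel query on actix's blocking thread pool.
// Reuses actions::list_pictures, the Picture model, and the r2d2 pool from this commit.
use actix_web::{error, get, web, Responder, Result};
use diesel::r2d2::{self, ConnectionManager};
use diesel::PgConnection;

type DbPool = r2d2::Pool<ConnectionManager<PgConnection>>;

#[get("/api/pictures/")]
async fn get_pictures(pool: web::Data<DbPool>) -> Result<impl Responder> {
    let pics = web::block(move || {
        let conn = pool.get()?; // r2d2 errors convert into the boxed error used by list_pictures
        actions::list_pictures(&conn)
    })
    .await? // BlockingError -> actix_web::Error
    .map_err(error::ErrorInternalServerError)?;
    // The OutputPicture mapping from main.rs is omitted here for brevity.
    Ok(web::Json(pics))
}
```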


@@ -0,0 +1,238 @@
extern crate diesel;
use backend::create_picture;
use backend::models::NewPicture;
use backend::establish_connection;
// use backend::*;
use std::ffi::OsStr;
use std::path::Path;
use std::time::UNIX_EPOCH;
use std::{fs, process::Command};
use walkdir::{DirEntry, WalkDir};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
struct PhotoExif {
#[serde(default, alias = "FocalLength")]
focal_length: Option<String>,
#[serde(default, alias = "ShutterSpeed")]
shutter_speed: Option<String>,
#[serde(alias = "ImageWidth")]
width: i32,
#[serde(alias = "ImageHeight")]
height: i32,
#[serde(default, alias = "Make")]
make: Option<String>,
#[serde(default, alias = "Model")]
model: Option<String>,
#[serde(default, alias = "LensID")]
lens: Option<String>,
#[serde(default, alias = "Orientation")]
orientation: Option<String>,
#[serde(default, alias = "FNumber")]
fnumber: Option<f64>,
#[serde(default, alias = "ExposureProgram")]
exposure_program: Option<String>,
#[serde(default, alias = "CreateDate")]
created_at: Option<i32>,
#[serde(default, alias = "ISO")]
iso: Option<i32>,
#[serde(default = "MaybeString::default", alias = "ExposureCompensation")]
exposure_compensation: MaybeString,
}
#[derive(Serialize, Deserialize)]
#[serde(untagged)]
enum MaybeString {
Number(i32),
Str(String),
None,
}
impl MaybeString {
fn default() -> MaybeString {
MaybeString::None
}
fn to_opt_string(&self) -> Option<String> {
if let MaybeString::Str(exp_comp) = &self {
Some(exp_comp.clone())
} else {
None
}
}
}
fn is_hidden(entry: &DirEntry) -> bool {
entry
.file_name()
.to_str()
.map(|s| s.starts_with('.'))
.unwrap_or(false)
}
fn is_image(entry: &DirEntry) -> bool {
let allowed_extensions = ["cr2", "cr3", "jpg", "jpeg"];
let extension = if let Some(ext) = entry.path().extension() {
ext
} else {
OsStr::new("")
};
if allowed_extensions
.iter()
.all(|&v| v != extension.to_ascii_lowercase())
{
return false;
}
true
}
static PICTURE_PATH: &str = "./pictures";
static LIBRARY_PATH: &str = "./examples";
fn main() {
let connection = establish_connection();
WalkDir::new(LIBRARY_PATH)
.into_iter()
.filter_map(Result::ok)
.filter(|e| !e.file_type().is_dir())
.filter(|e| !is_hidden(e))
.filter(is_image)
.for_each(|path| {
let thumbnail = if let Ok(t) = extract_preview(path.path()) {
t
} else {
println!("Could not create thumbnail");
return;
};
let thumbnail = std::path::PathBuf::from(thumbnail.strip_prefix(PICTURE_PATH).unwrap());
let output = Command::new("exiftool")
.arg("-j")
.arg("-d")
.arg("%s")
.arg(path.path())
.output()
.expect("failed to execute exiftool");
let pel: Vec<PhotoExif> = serde_json::from_slice(&output.stdout).unwrap();
let pe = &pel[0];
println!("pe = {}", serde_json::to_string_pretty(pe).unwrap());
let created_at: Option<i32> = if let Some(c) = pe.created_at {
Some(c)
} else {
let metadata = fs::metadata(&path.path()).unwrap();
if let Ok(time) = metadata.created() {
Some(time.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs()
.try_into()
.unwrap())
} else {
println!("Not supported on this platform or filesystem");
None
}
};
let filepath = path.path().to_string_lossy().into_owned();
let new_picture = NewPicture {
filepath: filepath.clone(),
created_at: created_at.clone(),
focal_length: pe.focal_length.clone(),
shutter_speed: pe.shutter_speed.clone(),
width: pe.width,
height: pe.height,
make: pe.make.clone(),
model: pe.model.clone(),
lens: pe.lens.clone(),
orientation: pe.orientation.clone(),
fnumber: pe.fnumber,
iso: pe.iso,
exposure_program: pe.exposure_program.clone(),
exposure_compensation: pe.exposure_compensation.to_opt_string(),
thumbnail: Some(thumbnail.into_os_string().into_string().unwrap()),
};
// create_picture returns the number of rows inserted, not the new row's id
let inserted = create_picture(&connection, new_picture);
println!(
"Inserted {} picture(s) with filepath={}",
inserted, filepath
);
});
}
/// Produce a JPEG thumbnail for `path` under PICTURE_PATH: JPEG sources are copied
/// as-is, raw files have their embedded preview extracted via exiftool.
fn extract_preview(path: &Path) -> Result<std::path::PathBuf, Box<dyn std::error::Error>> {
let file_name = if let Some(p) = path.file_name() {
p
} else {
OsStr::new("")
};
let parent = if let Some(p) = path.parent() {
p
} else {
Path::new(LIBRARY_PATH)
};
let relative_parent = parent
.strip_prefix(LIBRARY_PATH)
.expect("Could not remove prefix");
let thumb_path = Path::new(PICTURE_PATH).join(relative_parent);
if !thumb_path.exists() {
fs::create_dir_all(&thumb_path).unwrap_or_else(|e| {
panic!("Could not create directory {}: {}", thumb_path.display(), e)
});
}
let mut thumbnail = thumb_path.join(file_name);
thumbnail.set_extension("jpg");
let extension = path.extension().unwrap();
let jpegs = ["jpg", "jpeg"];
if jpegs.iter().any(|&x| x == extension.to_ascii_lowercase()) {
match fs::copy(path, &thumbnail) {
Ok(_it) => return Ok(thumbnail),
Err(err) => return Err(err.into()),
};
}
let _output_thumb = Command::new("exiftool")
.arg("-if")
.arg("$jpgfromraw")
.arg("-b")
.arg("-jpgfromraw")
.arg("-w")
.arg(thumb_path.join("%f.jpg"))
.arg("-execute")
.arg("-if")
.arg("$previewimage")
.arg("-b")
.arg("-previewimage")
.arg("-w")
.arg(thumb_path.join("%f.jpg"))
.arg("-execute")
.arg("-tagsfromfile")
.arg("@")
.arg("-srcfile")
.arg(thumb_path.join("%f.jpg"))
.arg("-overwrite_original")
.arg("-common_args")
.arg("--ext")
.arg("jpg")
.arg(path)
.output()
.expect("failed to execute exiftool to extract thumbnail");
// println!("{:?}", _output_thumb);
if thumbnail.exists() {
Ok(thumbnail)
} else {
Err("Could not create thumbnail".into())
}
}
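
exiftool's `-j` output reports `ExposureCompensation` sometimes as a bare number (for example `0`) and sometimes as a string (for example `"+1/3"`), which is presumably why the field is modeled as the untagged `MaybeString` enum rather than a plain `Option<String>`. A small test that could sit in this file illustrates how serde resolves the two shapes; the JSON values are illustrative, not taken from a real camera file:

```rust
// A test that could live alongside MaybeString in this file (illustrative JSON values).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn exposure_compensation_parses_as_number_or_string() {
        // exiftool may emit ExposureCompensation as 0 or as "+1/3"
        let as_number: MaybeString = serde_json::from_str("0").unwrap();
        let as_string: MaybeString = serde_json::from_str("\"+1/3\"").unwrap();

        assert!(matches!(as_number, MaybeString::Number(0)));
        assert_eq!(as_string.to_opt_string(), Some("+1/3".to_string()));
        // Numeric values are dropped when converted for the database column.
        assert_eq!(as_number.to_opt_string(), None);
    }
}
```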


@@ -0,0 +1,21 @@
extern crate diesel;
use self::models::*;
use diesel::prelude::*;
use backend::*;
fn main() {
use self::schema::pictures::dsl::*;
let connection = establish_connection();
let results = pictures
.limit(5)
.load::<Picture>(&connection)
.expect("Error loading pictures");
println!("Displaying {} pictures", results.len());
for picture in results {
println!("filepath: {}", picture.filepath);
println!("\tid: {}", picture.id);
}
}

backend/src/lib.rs (Normal file, 30 lines)

@@ -0,0 +1,30 @@
#[macro_use]
extern crate diesel;
extern crate dotenv;
pub mod models;
pub mod schema;
use self::models::{NewPicture};
use diesel::prelude::*;
use diesel::pg::PgConnection;
use dotenv::dotenv;
use std::env;
pub fn establish_connection() -> PgConnection {
dotenv().ok();
let database_url = env::var("DATABASE_URL")
.expect("DATABASE_URL must be set");
PgConnection::establish(&database_url)
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
}
/// Insert `new_picture` into the pictures table and return the number of rows inserted.
pub fn create_picture(conn: &PgConnection, new_picture: NewPicture) -> usize {
use schema::pictures;
diesel::insert_into(pictures::table)
.values(&new_picture)
.execute(conn)
.expect("Error saving new picture")
}

backend/src/main.rs (Normal file, 132 lines)

@@ -0,0 +1,132 @@
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
use actix_files as fs;
use actix_web::{Responder, Result};
use actix_web_lab::web::spa;
use backend::establish_connection;
use actix_web::{
// get, middleware, post, web, App, Error, HttpRequest, HttpResponse, HttpServer,
get,
middleware,
web,
App,
HttpServer,
};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
// use uuid::Uuid;
mod actions;
mod models;
mod schema;
use common::OutputPicture;
type DbPool = r2d2::Pool<ConnectionManager<PgConnection>>;
// type DbError = Box<dyn std::error::Error + Send + Sync>;
#[get("/api/pictures/")]
async fn get_pictures(pool: web::Data<DbPool>) -> Result<impl Responder> {
let conn = pool.get().unwrap();
let pics = if let Ok(p) = actions::list_pictures(&conn) {
p
} else {
vec![]
};
let pics: Vec<OutputPicture> = pics
.iter()
.map(|x| OutputPicture {
thumbnail: x.thumbnail.clone(),
width: x.width.try_into().unwrap(),
height: x.height.try_into().unwrap(),
})
.collect();
Ok(web::Json(pics))
}
embed_migrations!("migrations");
#[actix_web::main]
async fn main() -> std::io::Result<()> {
dotenv::dotenv().ok();
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
let connection = establish_connection();
// // This will run the necessary migrations.
// embedded_migrations::run(&connection).expect("Could not migrate database.");
// By default the output is thrown out. If you want to redirect it to stdout, you
// should call embedded_migrations::run_with_output.
embedded_migrations::run_with_output(&connection, &mut std::io::stdout())
.expect("Could not migrate database.");
// set up database connection pool
let conn_spec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<PgConnection>::new(conn_spec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
let host = "0.0.0.0";
let port = 8081;
log::info!("starting HTTP server at http://{}:{}", host, port);
// Start HTTP server
HttpServer::new(move || {
App::new()
// set up DB pool to be used with web::Data<Pool> extractor
.app_data(web::Data::new(pool.clone()))
.wrap(middleware::Logger::default())
.service(get_pictures)
.service(fs::Files::new("/api/pictures/", "./pictures/"))
.service(
spa()
.index_file("./dist/index.html")
.static_resources_mount("/")
.static_resources_location("./dist")
.finish(),
)
// .service(fs::Files::new("/", STATIC_FILES_PATH).index_file("index.html"))
// .service(get_user)
// .service(add_user)
})
.bind((host, port))?
.run()
.await
}
#[cfg(test)]
mod tests {
use super::*;
use actix_web::test;
#[actix_web::test]
async fn user_routes() {
std::env::set_var("RUST_LOG", "actix_web=debug");
env_logger::init();
dotenv::dotenv().ok();
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<PgConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
let _app = test::init_service(
App::new()
.app_data(web::Data::new(pool.clone()))
.wrap(middleware::Logger::default()), // .service(get_user)
// .service(add_user),
)
.await;
}
}
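
The existing test only builds an `App` around the pool and never exercises a route. Below is a sketch of how the `/api/pictures/` endpoint could be driven through actix-web's test utilities; it is written to sit inside the same `tests` module (so the `use super::*;` and `use actix_web::test;` imports apply) and assumes `DATABASE_URL` points at a migrated test database:

```rust
// Sketch of a route-level test; intended for the existing tests module,
// so it relies on `use super::*;` and `use actix_web::test;` above.
// Assumes DATABASE_URL points at a migrated test database.
#[actix_web::test]
async fn pictures_route_returns_ok() {
    dotenv::dotenv().ok();
    let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
    let manager = ConnectionManager::<PgConnection>::new(connspec);
    let pool = r2d2::Pool::builder()
        .build(manager)
        .expect("Failed to create pool.");

    let app = test::init_service(
        App::new()
            .app_data(web::Data::new(pool))
            .service(get_pictures),
    )
    .await;

    let req = test::TestRequest::get().uri("/api/pictures/").to_request();
    let resp = test::call_service(&app, req).await;
    assert!(resp.status().is_success());
}
```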

backend/src/models.rs (Normal file, 43 lines)

@@ -0,0 +1,43 @@
use serde::Serialize;
use super::schema::pictures;
#[derive(Queryable, Serialize)]
pub struct Picture {
pub id: i32,
pub filepath: String,
pub created_at: Option<i32>,
pub focal_length: Option<String>,
pub shutter_speed: Option<String>,
pub width: i32,
pub height: i32,
pub make: Option<String>,
pub model: Option<String>,
pub lens: Option<String>,
pub orientation: Option<String>,
pub fnumber: Option<f64>,
pub iso: Option<i32>,
pub exposure_program: Option<String>,
pub exposure_compensation: Option<String>,
pub thumbnail: Option<String>,
}
#[derive(Insertable)]
#[table_name = "pictures"]
pub struct NewPicture {
pub filepath: String,
pub created_at: Option<i32>,
pub focal_length: Option<String>,
pub shutter_speed: Option<String>,
pub width: i32,
pub height: i32,
pub make: Option<String>,
pub model: Option<String>,
pub lens: Option<String>,
pub orientation: Option<String>,
pub fnumber: Option<f64>,
pub iso: Option<i32>,
pub exposure_program: Option<String>,
pub exposure_compensation: Option<String>,
pub thumbnail: Option<String>,
}

backend/src/schema.rs (Normal file, 20 lines)

@@ -0,0 +1,20 @@
table! {
pictures (id) {
id -> Int4,
filepath -> Varchar,
created_at -> Nullable<Int4>,
focal_length -> Nullable<Varchar>,
shutter_speed -> Nullable<Varchar>,
width -> Int4,
height -> Int4,
make -> Nullable<Varchar>,
model -> Nullable<Varchar>,
lens -> Nullable<Varchar>,
orientation -> Nullable<Varchar>,
fnumber -> Nullable<Float8>,
iso -> Nullable<Int4>,
exposure_program -> Nullable<Varchar>,
exposure_compensation -> Nullable<Varchar>,
thumbnail -> Nullable<Varchar>,
}
}
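
`schema.rs` is what `diesel print_schema` writes to the path configured in `diesel.toml`; the `table!` macro generates a typed query DSL, and the column order here is what the `Queryable` derive on `Picture` relies on. As a small sketch of using that generated DSL for an ad-hoc query (the 4000 px threshold is just an example value, and the connection setup mirrors the `show_pictures` binary):

```rust
// Sketch: an ad-hoc query against the DSL generated by the table! macro.
// Connection setup mirrors the existing example binary; 4000 px is an arbitrary example.
use backend::establish_connection;
use backend::models::Picture;
use backend::schema::pictures::dsl::*;
use diesel::prelude::*;

fn main() {
    let connection = establish_connection();
    let wide: Vec<Picture> = pictures
        .filter(width.gt(4000))
        .order_by(created_at.desc())
        .load::<Picture>(&connection)
        .expect("Error loading pictures");
    println!("{} pictures wider than 4000 px", wide.len());
}
```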