Compare commits
No commits in common. "main" and "v0.1.0" have entirely different histories.
5 changed files with 12 additions and 67 deletions
.github/workflows/build.yaml (vendored): 27 lines changed
@@ -1,27 +0,0 @@
-# build.yaml
-# ---
-# Builds the rust code and verifies that it compiles
-
-name: build
-on:
-  push:
-    paths:
-      - '**.rs'
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: install dependencies
-        run: |
-          apt update -y
-          apt install -y curl gcc g++ make
-      - uses: https://github.com/actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
-      - uses: https://github.com/actions-rs/cargo@v1
-        with:
-          command: build
-          args: --verbose --all
@@ -1,3 +1,10 @@
 # Bumblebee

 Bumblebee is a simple wrapper around FFMPEG for transcoding media.
+
+## (main) TODO
+main things that need to be done.
+
+- [x] make initial behaviour work.
+- [ ] make archiving system work.
+- [ ] refactor code so it makes sense.
@@ -1,7 +1,6 @@
 [files]
 input_path = "/data/input"
 output_path = "/data/output"
-keep_directory_structure = true # keep directory structure when outputting files
 include = [ 'mp4', 'avi', 'mkv' ] # file extensions to include

 [files.cleanup]
@@ -32,7 +32,6 @@ pub struct ConfigFilesCleanup {
 pub struct ConfigFiles {
     pub input_path: String,
     pub output_path: String,
-    pub keep_directory_structure: Option<bool>,
     pub include: Vec<String>,
     pub cleanup: Option<ConfigFilesCleanup>,
 }
@@ -52,7 +51,6 @@ impl Config {
         files: ConfigFiles {
             input_path: String::from("/data/input"),
             output_path: String::from("/data/output"),
-            keep_directory_structure: None,
             include: Vec::new(),
             cleanup: None,
         },
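The [files] table in the config hunk above maps field-for-field onto the ConfigFiles struct shown here. The parsing code itself is not part of this diff, so the following is only a minimal sketch of how such a config could be deserialized, assuming serde and the toml crate rather than whatever the project actually uses:

// Sketch only: the crate choices (serde, toml) and derive setup are assumptions,
// not taken from this repository.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
pub struct ConfigFiles {
    pub input_path: String,
    pub output_path: String,
    pub include: Vec<String>,
}

#[derive(Debug, Deserialize)]
pub struct Config {
    pub files: ConfigFiles,
}

fn main() {
    let raw = r#"
        [files]
        input_path = "/data/input"
        output_path = "/data/output"
        include = [ 'mp4', 'avi', 'mkv' ]
    "#;
    let config: Config = toml::from_str(raw).expect("invalid config");
    println!("{:#?}", config.files);
}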
src/main.rs: 40 lines changed
@@ -23,7 +23,7 @@ fn main() {
     trace!("Config: {:#?}", &config);

     debug!("commit: {}", env!("GIT_COMMIT_HASH"));
-    debug!("tag: {}", env!("GIT_TAG"));
+    debug!("version: {}", env!("GIT_TAG"));

     let input_files = files::get_files(&config.files.input_path)
         .into_iter()
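The two debug! lines rely on GIT_COMMIT_HASH and GIT_TAG being available at compile time via env!. This diff does not show where those variables come from; a build script along the following lines could set them, but treat this as an assumption rather than the project's actual build.rs:

// build.rs sketch (hypothetical): expose the current git commit and tag to the
// compiler so that env!("GIT_COMMIT_HASH") and env!("GIT_TAG") resolve.
use std::process::Command;

fn git(args: &[&str]) -> String {
    Command::new("git")
        .args(args)
        .output()
        .ok()
        .and_then(|out| String::from_utf8(out.stdout).ok())
        .map(|s| s.trim().to_string())
        .unwrap_or_else(|| String::from("unknown"))
}

fn main() {
    println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git(&["rev-parse", "--short", "HEAD"]));
    println!("cargo:rustc-env=GIT_TAG={}", git(&["describe", "--tags", "--always"]));
}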
@@ -47,21 +47,9 @@ fn start_transcode_run(input_files: &Vec<PathBuf>, config: &Config) -> i32 {
             error!("Failed to process file {}: {}", job.input, remarks);
         };

-        let mut output_path = if let Some(keep_directory_structure) = config.files.keep_directory_structure {
-            if keep_directory_structure {
-                let mut output_path = PathBuf::from(&config.files.output_path);
-                output_path.push(file.strip_prefix(&config.files.input_path).unwrap());
-                output_path
-            } else {
-                PathBuf::from(&config.files.output_path)
-                    .join(file.file_name().unwrap())
-            }
-        } else {
-            PathBuf::from(&config.files.output_path)
-                .join(file.file_name().unwrap())
-        };
-
+        let mut output_path = file.clone();
         output_path.set_extension(&config.ffmpeg.output.format);
+        let output_path = PathBuf::from(&config.files.output_path).join(output_path.file_name().unwrap()); // TODO: This is a bit of a mess.

         let job = TranscodeJob::new(
             file.to_str().unwrap(),
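The net effect of this hunk: on main the output path could mirror the directory structure under input_path, while on v0.1.0 every output file lands directly in output_path. A small self-contained illustration, where the paths and the mp4 output format are made-up example values rather than anything from the repository:

use std::path::PathBuf;

fn main() {
    let input_path = "/data/input";
    let output_path = "/data/output";
    let file = PathBuf::from("/data/input/shows/episode1.mkv");

    // main, with keep_directory_structure = true: preserve the subdirectory.
    let mut kept = PathBuf::from(output_path);
    kept.push(file.strip_prefix(input_path).unwrap());
    kept.set_extension("mp4");
    assert_eq!(kept, PathBuf::from("/data/output/shows/episode1.mp4"));

    // v0.1.0: swap the extension, then join only the file name onto output_path.
    let mut renamed = file.clone();
    renamed.set_extension("mp4");
    let flat = PathBuf::from(output_path).join(renamed.file_name().unwrap());
    assert_eq!(flat, PathBuf::from("/data/output/episode1.mp4"));
}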
@@ -99,27 +87,7 @@ fn start_transcode_run(input_files: &Vec<PathBuf>, config: &Config) -> i32 {
             continue;
         }

-        if let Some(cleanup) = &config.files.cleanup {
-            if !cleanup.enabled { () }
-            match cleanup.original_cleanup_behavior {
-                configuration::ConfigFilesCleanupOriginalBehavior::delete => {
-                    if let Err(e) = std::fs::remove_file(&job.input) {
-                        error!("Failed to delete original file {}: {}", job.input, e);
-                    }
-                },
-                configuration::ConfigFilesCleanupOriginalBehavior::archive => {
-                    let mut archive_path = PathBuf::from(&cleanup.archive.path);
-                    archive_path.push(job.input.strip_prefix(&config.files.input_path).unwrap());
-                    if let Err(e) = std::fs::create_dir_all(archive_path.parent().unwrap()) {
-                        error!("Failed to create archive directory {}: {}", archive_path.parent().unwrap().to_str().unwrap(), e);
-                    }
-                    if let Err(e) = std::fs::rename(&job.input, &archive_path) {
-                        error!("Failed to archive original file {}: {}", job.input, e);
-                    }
-                },
-                configuration::ConfigFilesCleanupOriginalBehavior::keep => (),
-            }
-        }
+        // TODO: Cleanup

         info!("Finished processing file {}.", job.input);
         success_count += 1;
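One detail of the block removed above is worth flagging: on main, the line if !cleanup.enabled { () } only evaluates to the unit value, so the match below it still runs even when cleanup is disabled. A standalone sketch, with hypothetical stand-in types rather than the repository's, of a guard that actually skips the work:

// Sketch only: Cleanup and OriginalBehavior are stand-ins for the real
// configuration types; the point is the early return when cleanup is disabled.
#[allow(dead_code)]
enum OriginalBehavior { Delete, Archive, Keep }

struct Cleanup {
    enabled: bool,
    original_cleanup_behavior: OriginalBehavior,
}

fn run_cleanup(cleanup: &Cleanup, input: &str) {
    if !cleanup.enabled {
        return; // skip cleanup entirely; `{ () }` on its own does not do this
    }
    match cleanup.original_cleanup_behavior {
        OriginalBehavior::Delete => println!("would delete {input}"),
        OriginalBehavior::Archive => println!("would archive {input}"),
        OriginalBehavior::Keep => {}
    }
}

fn main() {
    let cleanup = Cleanup { enabled: false, original_cleanup_behavior: OriginalBehavior::Delete };
    run_cleanup(&cleanup, "/data/input/example.mkv"); // prints nothing
}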