// Based on https://github.com/jD91mZM2/termplay
// MIT License
use image::{RgbaImage, DynamicImage, GenericImageView};
use base64;
use termion::color::{Bg, Fg, Rgb};
#[cfg(feature = "video")]
use termion::input::TermRead;
#[cfg(feature = "video")]
use gstreamer::prelude::*;
#[cfg(feature = "video")]
use gstreamer_app;
use failure::{Error, format_err};
use std::io::Write;
#[cfg(feature = "video")]
use std::sync::{Arc, RwLock};
/// Convenience alias used throughout this tool: every fallible operation
/// reports its failure as a `failure::Error`.
pub type MResult<T> = Result<T, Error>;
/// Entry point of the preview helper.
///
/// The parent process invokes this binary with a fixed positional argument
/// list: cell grid size (1-2), pixel size (3-4), cell aspect ratio (5),
/// preview type (6), autoplay/mute flags (7-8), render target (9) and the
/// media path (10). A missing or malformed argument is a caller bug, so
/// failing loudly via `expect`/`unwrap` here is intentional.
fn main() -> MResult<()> {
    let args = std::env::args().collect::<Vec<String>>();
    let xsize: usize = args.get(1)
        .expect("Provide xsize")
        .parse::<usize>()
        .unwrap();
    let ysize = args.get(2)
        .expect("provide ysize")
        .parse()
        .unwrap();
    // Pixel dimensions may be overridden below for the unicode target.
    let mut xpix = args.get(3)
        .expect("provide xsize in pixels")
        .parse::<usize>()
        .unwrap();
    let mut ypix = args.get(4)
        .expect("provide ysize in pixels")
        .parse::<usize>()
        .unwrap();
    let cell_ratio = args.get(5)
        .expect("Provide cell ratio")
        .parse::<f32>()
        .unwrap();
    // Already a String: parsing it into a String again was a can't-fail
    // no-op, so just take an owned copy.
    let preview_type = args.get(6)
        .expect("Provide preview type")
        .to_string();
    // Only consumed by the cfg(feature = "video") match arms below.
    #[allow(unused_variables)]
    let autoplay = args.get(7)
        .expect("Autoplay?")
        .parse::<bool>()
        .unwrap();
    #[allow(unused_variables)]
    let mute = args.get(8)
        .expect("Muted?")
        .parse::<bool>()
        .unwrap();
    let target = args.get(9)
        .expect("Render target?")
        .to_string();
    let path = args.get(10).expect("Provide path");
    // Map the requested render target onto the capabilities compiled in;
    // anything unsupported degrades to plain unicode half-blocks.
    let target = match target.as_str() {
        #[cfg(feature = "sixel")]
        "sixel" => RenderTarget::Sixel,
        "kitty" => RenderTarget::Kitty,
        "auto" => {
            // Guess from $TERM; unwrap_or_default avoids allocating an
            // empty String when the variable is present.
            let term = std::env::var("TERM").unwrap_or_default();
            match term.as_str() {
                "kitty" => RenderTarget::Kitty,
                #[cfg(feature = "sixel")]
                "xterm" => RenderTarget::Sixel,
                _ => RenderTarget::Unicode,
            }
        }
        _ => RenderTarget::Unicode
    };
    // Unicode rendering draws one character cell per horizontal "pixel" and
    // two vertical "pixels" per cell (half blocks), so the pixel dimensions
    // derive from the cell grid rather than the requested pixel size.
    if target == RenderTarget::Unicode {
        xpix = xsize;
        ypix = ysize * 2;
    }
    let renderer = Renderer::new(target,
                                 xsize,
                                 ysize,
                                 xpix,
                                 ypix,
                                 cell_ratio);
    let result =
        match preview_type.as_ref() {
            #[cfg(feature = "video")]
            "video" => video_preview(path,
                                     renderer,
                                     autoplay,
                                     mute),
            "image" => image_preview(path,
                                     renderer),
            #[cfg(feature = "video")]
            "audio" => audio_preview(path,
                                     autoplay,
                                     mute),
            #[cfg(feature = "video")]
            _ => { panic!("Available types: video/image/audio") }
            #[cfg(not(feature = "video"))]
            _ => { panic!("Available type: image") }
        };
    if result.is_err() {
        // Echo the failure (on stdout, where the parent reads the preview)
        // before propagating it through the exit status.
        println!("{:?}", &result);
        result
    } else {
        Ok(())
    }
}
/// Renders a single still image: load it, scale it to the renderer's
/// maximum pixel size, and hand the RGBA buffer over for display.
fn image_preview(path: &str,
                 renderer: Renderer) -> MResult<()> {
    let loaded = image::open(&path)?;
    let (max_x, max_y) = renderer.max_size_pix(&loaded);
    // Gaussian filtering gives the smoothest downscale for photos.
    let scaled = loaded
        .resize_exact(max_x as u32, max_y as u32, image::FilterType::Gaussian)
        .to_rgba();
    renderer.send_image(&scaled)?;
    Ok(())
}
/// Types that can report their image dimensions.
trait ImgSize {
    /// Returns the `(width, height)` of the image in pixels.
    fn size(&self) -> MResult<(usize, usize)>;
}
#[cfg(feature = "video")]
impl ImgSize for gstreamer::Sample {
fn size(&self) -> MResult<(usize, usize)> {
let size =