summaryrefslogtreecommitdiffstats
path: root/src/hunter-media.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/hunter-media.rs')
-rw-r--r--src/hunter-media.rs1009
1 files changed, 632 insertions, 377 deletions
diff --git a/src/hunter-media.rs b/src/hunter-media.rs
index cc1de41..585f3d8 100644
--- a/src/hunter-media.rs
+++ b/src/hunter-media.rs
@@ -1,8 +1,7 @@
// Based on https://github.com/jD91mZM2/termplay
// MIT License
-use image::{FilterType, DynamicImage, GenericImageView};
-use sixel::encoder::Encoder;
+use image::{RgbaImage, DynamicImage, GenericImageView};
use base64;
use termion::color::{Bg, Fg, Rgb};
@@ -11,7 +10,7 @@ use termion::input::TermRead;
#[cfg(feature = "video")]
-use gstreamer::{self, prelude::*};
+use gstreamer::prelude::*;
#[cfg(feature = "video")]
use gstreamer_app;
@@ -36,50 +35,65 @@ fn main() -> MResult<()> {
.expect("provide ysize")
.parse()
.unwrap();
- let xpix = args.get(3)
+ let mut xpix = args.get(3)
.expect("provide xsize in pixels")
.parse::<usize>()
.unwrap();
- let ypix = args.get(4)
+ let mut ypix = args.get(4)
.expect("provide ysize in pixels")
.parse::<usize>()
.unwrap();
- let preview_type = args.get(5)
+ let cell_ratio = args.get(5)
+ .expect("Provide cell ratio")
+ .parse::<f32>()
+ .unwrap();
+ let preview_type = args.get(6)
.expect("Provide preview type")
.parse::<String>()
.unwrap();
- // #[cfg(feature = "video")]
- let autoplay = args.get(6)
+ let autoplay = args.get(7)
.expect("Autoplay?")
.parse::<bool>()
.unwrap();
- // #[cfg(feature = "video")]
- let mute = args.get(7)
+ let mute = args.get(8)
.expect("Muted?")
.parse::<bool>()
.unwrap();
- let sixel = args.get(8)
- .expect("Use SIXEL?")
- .parse::<bool>()
+ let target = args.get(9)
+ .expect("Render target?")
+ .parse::<String>()
.unwrap();
- let path = args.get(9).expect("Provide path");
-
- let target = if sixel {
- if std::env::var("TERM") == Ok(String::from("xterm-kitty")) {
- RenderTarget::Kitty
- } else {
- RenderTarget::Sixel
+ let path = args.get(10).expect("Provide path");
+
+ let target = match target.as_str() {
+ #[cfg(feature = "sixel")]
+ "sixel" => RenderTarget::Sixel,
+ "kitty" => RenderTarget::Kitty,
+ "auto" => {
+ let term = std::env::var("TERM").unwrap_or(String::from(""));
+ match term.as_str() {
+ "kitty" => RenderTarget::Kitty,
+ #[cfg(feature = "sixel")]
+ "xterm" => RenderTarget::Sixel,
+ _ => RenderTarget::Unicode,
+ }
}
- } else {
- RenderTarget::Unicode
+ _ => RenderTarget::Unicode
};
+ if target == RenderTarget::Unicode {
+ xpix = xsize;
+ ypix = ysize * 2;
+ }
+
+
let renderer = Renderer::new(target,
xsize,
ysize,
xpix,
- ypix);
+ ypix,
+ cell_ratio);
let result =
match preview_type.as_ref() {
@@ -115,70 +129,95 @@ fn main() -> MResult<()> {
fn image_preview(path: &str,
renderer: Renderer) -> MResult<()> {
let img = image::open(&path)?;
+ let max_size = renderer.max_size_pix(&img);
+ let img = img.resize_exact(max_size.0 as u32,
+ max_size.1 as u32,
+ image::FilterType::Gaussian)
+ .to_rgba();
renderer.send_image(&img)?;
Ok(())
}
+trait ImgSize {
+ fn size(&self) -> MResult<(usize, usize)>;
+}
+
+#[cfg(feature = "video")]
+impl ImgSize for gstreamer::Sample {
+ fn size(&self) -> MResult<(usize, usize)> {
+ let size = || {
+ let caps = self.as_ref().get_caps()?;
+ let caps = caps.get_structure(0)?;
+ let width = caps.get::<i32>("width")? as usize;
+ let height = caps.get::<i32>("height")? as usize;
+ Some((width, height))
+ };
+ size().ok_or(format_err!("Can't get size from sample!"))
+ }
+}
+
+impl ImgSize for RgbaImage {
+ fn size(&self) -> MResult<(usize, usize)> {
+ let width = self.width() as usize;
+ let height = self.height() as usize;
+ Ok((width, height))
+ }
+}
+
+impl ImgSize for DynamicImage {
+ fn size(&self) -> MResult<(usize, usize)> {
+ let width = self.width() as usize;
+ let height = self.height() as usize;
+ Ok((width, height))
+ }
+}
+
+
#[cfg(feature = "video")]
fn video_preview(path: &String,
renderer: Renderer,
autoplay: bool,
mute: bool)
-> MResult<()> {
- let low_fps = renderer.target == RenderTarget::Sixel;
-
- let (player, appsink) = make_gstreamer(low_fps)?;
-
- let uri = format!("file://{}", &path);
-
- player.set_property("uri", &uri)?;
-
+ let gst = Gstreamer::new(path)?;
let renderer = Arc::new(RwLock::new(renderer));
let crenderer = renderer.clone();
+ let cgst = gst.clone();
+ gst.process_first_frame(&renderer)?;
-
-
-
- let p = player.clone();
-
- appsink.set_callbacks(
+ gst.appsink.set_callbacks(
gstreamer_app::AppSinkCallbacks::new()
.new_sample({
move |sink| {
+ let renderer = crenderer.clone();
+ let gst = cgst.clone();
+
let sample = match sink.pull_sample() {
Some(sample) => sample,
- None => return gstreamer::FlowReturn::Eos,
+ None => return Err(gstreamer::FlowError::Eos)
};
- let position = p.query_position::<gstreamer::ClockTime>()
- .map(|p| p.seconds().unwrap_or(0))
- .unwrap_or(0);
+ let pos = gst.position();
+ let dur = gst.duration();
- let duration = p.query_duration::<gstreamer::ClockTime>()
- .map(|d| d.seconds().unwrap_or(0))
- .unwrap_or(0);
-
- let renderer = crenderer.clone();
std::thread::spawn(move || {
- renderer.write()
- .map(|mut r| r.send_frame(&*sample,
- position,
- duration)).ok()
+                            // This lock makes sure only one frame is being
+                            // sent at a time
+ renderer.try_write()
+ .map(|mut r| r.new_frame(sample,
+ pos,
+ dur).unwrap())
+ .map_err(|_| {
+ // But if processing takes too long, reduce rate
+ let rate = gst.get_rate().unwrap();
+ gst.set_rate(rate-1)
+ }).ok();
});
- if autoplay == false {
- // Just render first frame to get a static image
- match p.set_state(gstreamer::State::Paused)
- .into_result() {
- Ok(_) => gstreamer::FlowReturn::Eos,
- Err(_) => gstreamer::FlowReturn::Error
- }
- } else {
- gstreamer::FlowReturn::Ok
- }
+ Ok(gstreamer::FlowSuccess::Ok)
}
})
.eos({
@@ -189,25 +228,24 @@ fn video_preview(path: &String,
.build()
);
- if mute == true || autoplay == false {
- player.set_property("volume", &0.0)?;
- }
- player.set_state(gstreamer::State::Playing).into_result()?;
-
-
-
+    // Flush pipeline and restart with correct resizing
+ gst.stop()?;
+ if autoplay {
+ gst.start(mute)?;
+ } else {
+ gst.pause()?;
+ gst.send_preroll(&renderer)?;
+ }
- read_keys(player, Some(renderer))?;
+ read_keys(gst.clone(), Some(renderer))?;
Ok(())
}
#[cfg(feature = "video")]
-fn read_keys(player: gstreamer::Element,
+fn read_keys(gst: Gstreamer,
renderer: Option<Arc<RwLock<Renderer>>>) -> MResult<()> {
- let seek_time = gstreamer::ClockTime::from_seconds(5);
-
let stdin = std::io::stdin();
let mut stdin = stdin.lock();
@@ -218,68 +256,65 @@ fn read_keys(player: gstreamer::Element,
match input.as_str() {
- "q" => std::process::exit(0),
+ "q" => return gst.stop(),
">" => {
- if let Some(mut time) = player
- .query_position::<gstreamer::ClockTime>() {
- time += seek_time;
-
- player.seek_simple(
- gstreamer::SeekFlags::FLUSH,
- gstreamer::format::GenericFormattedValue::from_time(time)
- )?;
+ gst.seek_forward()?;
+ renderer.as_ref().map(|r| {
+ if gst.get_state() == gstreamer::State::Paused {
+ gst.send_preroll(&r).unwrap();
}
+ });
},
"<" => {
- if let Some(mut time) = player
- .query_position::<gstreamer::ClockTime>() {
- if time >= seek_time {
- time -= seek_time;
- } else {
- time = gstreamer::ClockTime(Some(0));
- }
-
- player.seek_simple(
- gstreamer::SeekFlags::FLUSH,
- gstreamer::format::GenericFormattedValue::from_time(time)
- )?;
- }
- }
- "p" => {
- player.set_state(gstreamer::State::Playing).into_result()?;
-
- // To actually start playing again
- if let Some(time) = player
- .query_position::<gstreamer::ClockTime>() {
- player.seek_simple(
- gstreamer::SeekFlags::FLUSH,
- gstreamer::format::GenericFormattedValue::from_time(time)
- )?;
+ gst.seek_backward()?;
+ renderer.as_ref().map(|r| {
+ if gst.get_state() == gstreamer::State::Paused {
+ gst.send_preroll(&r).unwrap();
}
+ });
}
- "a" => {
- player.set_state(gstreamer::State::Paused).into_result()?;
- }
- "m" => {
- player.set_property("volume", &0.0)?;
- }
- "u" => {
- player.set_property("volume", &1.0)?;
- }
- // TODO add pixel size
+ "p" => gst.play()?,
+ "a" => gst.pause()?,
+ "m" => gst.mute()?,
+ "u" => gst.unmute()?,
"xy" => {
if let Some(ref renderer) = renderer {
- let xsize = stdin.read_line()?;
- let ysize = stdin.read_line()?;
-
- let xsize = xsize.unwrap_or(String::from("0")).parse::<usize>()?;
- let ysize = ysize.unwrap_or(String::from("0")).parse::<usize>()?;
+ let xsize = stdin.read_line()?
+ .unwrap_or(String::from("0"))
+ .parse::<usize>()?;;
+ let ysize = stdin.read_line()?
+ .unwrap_or(String::from("0"))
+ .parse::<usize>()?;;
+ let mut xpix = stdin.read_line()?
+ .unwrap_or(String::from("0"))
+ .parse::<usize>()?;;
+ let mut ypix = stdin.read_line()?
+ .unwrap_or(String::from("0"))
+ .parse::<usize>()?;;
+ let cell_ratio = stdin.read_line()?
+ .unwrap_or(String::from("0"))
+ .parse::<f32>()?;;
let mut renderer = renderer
.write()
.map_err(|_| format_err!("Renderer RwLock failed!"))?;
- renderer.set_size(xsize, ysize)?;
+ if renderer.target == RenderTarget::Unicode {
+ xpix = xsize;
+ ypix = ysize*2;
+ }
+
+
+ renderer.set_widget_size(xsize, ysize, xpix, ypix, cell_ratio)?;
+ match renderer.last_frame {
+ Some(ref sample) => {
+ let (max_x, max_y) = renderer.max_size_pix(sample);
+ gst.set_scaling(max_x, max_y)?;
+ }
+ _ => {}
+ }
+
+
}
}
_ => {}
@@ -292,12 +327,8 @@ pub fn audio_preview(path: &String,
autoplay: bool,
mute: bool)
-> MResult<()> {
- let (player, _) = make_gstreamer(false)?;
-
- let uri = format!("file://{}", &path);
-
- player.set_property("uri", &uri)?;
- let p = player.clone();
+ let gst = Gstreamer::new(path)?;
+ let tgst = gst.clone();
// Since events don't work with audio files...
std::thread::spawn(move || -> MResult<()> {
@@ -306,14 +337,10 @@ pub fn audio_preview(path: &String,
let mut stdout = std::io::stdout();
loop {
std::thread::sleep(sleep_duration);
+ let gst = tgst.clone();
- let position = p.query_position::<gstreamer::ClockTime>()
- .map(|p| p.seconds().unwrap_or(0))
- .unwrap_or(0);
-
- let duration = p.query_duration::<gstreamer::ClockTime>()
- .map(|d| d.seconds().unwrap_or(0))
- .unwrap_or(0);
+ let position = gst.position();
+ let duration = gst.duration();
// Just redo loop until position changes
if last_pos == Some(position) {
@@ -333,353 +360,581 @@ pub fn audio_preview(path: &String,
});
- if mute == true || autoplay == false{
- player.set_property("volume", &0.0)?;
- } else {
- player.set_state(gstreamer::State::Playing).into_result()?;
+ if autoplay && !mute {
+ gst.start(mute)?;
}
- read_keys(player, None)?;
+ read_keys(gst, None)?;
Ok(())
}
#[cfg(feature = "video")]
-pub fn make_gstreamer(low_fps: bool) -> MResult<(gstreamer::Element,
- gstreamer_app::AppSink)> {
- gstreamer::init()?;
+#[derive(Clone)]
+struct Gstreamer {
+ player: gstreamer::Element,
+ appsink: gstreamer_app::AppSink,
+ videorate: gstreamer::Element,
+}
- let player = gstreamer::ElementFactory::make("playbin", None)
- .ok_or(format_err!("Can't create playbin"))?;
+#[cfg(feature = "video")]
+impl Gstreamer {
+ fn new(file: &str) -> MResult<Gstreamer> {
+ use gstreamer::{Element, ElementFactory, GhostPad, Bin};
+ gstreamer::init()?;
- let videorate = gstreamer::ElementFactory::make("videorate", None)
- .ok_or(format_err!("Can't create videorate element"))?;
+ let player = ElementFactory::make("playbin", None)
+ .ok_or(format_err!("Can't create playbin"))?;
- let pnmenc = gstreamer::ElementFactory::make("pnmenc", None)
- .ok_or(format_err!("Can't create PNM-encoder"))?;
+ let videorate = ElementFactory::make("videorate", None)
+ .ok_or(format_err!("Can't create videorate element"))?;
- let sink = gstreamer::ElementFactory::make("appsink", None)
- .ok_or(format_err!("Can't create appsink"))?;
+ let sink = ElementFactory::make("appsink", None)
+ .ok_or(format_err!("Can't create appsink"))?;
- let appsink = sink.clone()
- .downcast::<gstreamer_app::AppSink>()
- .unwrap();
+ let appsink = sink.clone()
+ .downcast::<gstreamer_app::AppSink>()
+ .unwrap();
+ let elems = &[&videorate, //&videoscale,
+ &sink];
- if low_fps {
- videorate.set_property("max-rate", &10)?;
- } else {
- videorate.set_property("max-rate", &30)?;
+ let bin = Bin::new(None);
+
+ bin.add_many(elems)?;
+ Element::link_many(elems)?;
+
+ // make input for bin point to first element
+ let sink = elems[0].get_static_pad("sink").unwrap();
+ let ghost = GhostPad::new(Some("sink"), &sink)
+ .ok_or(format_err!("Can't create GhostPad"))?;
+
+ ghost.set_active(true)?;
+ bin.add_pad(&ghost)?;
+
+ appsink.set_drop(true);
+ appsink.set_max_buffers(4);
+
+ videorate.set_property("drop-only", &true)?;
+ //videorate.set_property("max-rate", &1)?;
+
+ let uri = format!("file://{}", &file);
+
+ player.set_property("video-sink", &bin.upcast::<gstreamer::Element>())?;
+ player.set_property("uri", &uri)?;
+
+ use gstreamer::prelude::*;
+
+ Ok(Gstreamer {
+ player,
+ appsink,
+ videorate,
+ })
+ }
+
+ pub fn change_format(&self, format: gstreamer::Caps) -> MResult<()> {
+ use gstreamer::Element;
+ use gstreamer_video::prelude::*;
+
+ let state = self.get_state();
+ self.pause()?;
+
+
+ let appsink = self.appsink.clone()
+ .upcast::<Element>();
+
+ Element::unlink_many(&[&self.videorate, &appsink]);
+
+ self.appsink.set_caps(Some(&format));
+
+ Element::link_many(&[&self.videorate, &appsink])?;
+
+ std::thread::sleep(std::time::Duration::from_millis(100));
+ self.player.set_state(state)?;
+
+
+
+ Ok(())
}
- let elems = &[&videorate, &pnmenc, &sink];
+ pub fn process_first_frame(&self,
+ renderer: &Arc<RwLock<Renderer>>) -> MResult<()> {
+ self.pause()?;
+
+ let sample = self.appsink.pull_preroll()
+ .ok_or_else(|| format_err!("Couldn't read first frame!"))?;
+
+ let (max_x, max_y) = renderer.read()
+ .map_err(|_| format_err!("Failed at locking renderer!"))?
+ .max_size_pix(&sample);
- let bin = gstreamer::Bin::new(None);
- bin.add_many(elems)?;
- gstreamer::Element::link_many(elems)?;
+ self.set_scaling(max_x, max_y)?;
- // make input for bin point to first element
- let sink = elems[0].get_static_pad("sink").unwrap();
- let ghost = gstreamer::GhostPad::new("sink", &sink)
- .ok_or(format_err!("Can't create GhostPad"))?;
+ Ok(())
+ }
- ghost.set_active(true)?;
- bin.add_pad(&ghost)?;
- player.set_property("video-sink", &bin.upcast::<gstreamer::Element>())?;
+ pub fn send_preroll(&self,
+ renderer: &Arc<RwLock<Renderer>>) -> MResult<()> {
+ let appsink = self.appsink.downcast_ref::<gstreamer_app::AppSink>().unwrap();
+ let sample = appsink.pull_preroll().unwrap();
+ let pos = self.position();
+ let dur = self.duration();
+ renderer.write().unwrap().new_frame(sample, pos, dur)
+ }
+
+ pub fn set_scaling(&self, x: usize, y: usize) -> MResult<()> {
+ use gstreamer::Caps;
- Ok((player, appsink))
+ let caps =
+ format!("video/x-raw,format=RGBA,width={},height={}",
+ x,
+ y);
+ let caps = Caps::from_string(&caps).unwrap();
+
+ self.change_format(caps)
+ }
+
+ pub fn get_rate(&self) -> MResult<i32> {
+ let rate = self.videorate
+ .get_property("max-rate")?
+ .downcast::<i32>().unwrap()
+ .get()
+ .ok_or_else(|| format_err!("No video rate???"))?;
+
+ if rate == 2147483647 {
+            // Sane default fps cap if rendering is too slow
+ Ok(30)
+ } else {
+ Ok(rate)
+ }
+ }
+
+ pub fn set_rate(&self, rate: i32) -> MResult<()> {
+ self.videorate.set_property("max-rate", &rate)?;
+ Ok(())
+ }
+
+ pub fn position(&self) -> usize {
+ self.player.query_position::<gstreamer::ClockTime>()
+ .map(|p| p.seconds().unwrap_or(0))
+ .unwrap_or(0) as usize
+ }
+
+ pub fn duration(&self) -> usize {
+ self.player.query_duration::<gstreamer::ClockTime>()
+ .map(|d| d.seconds().unwrap_or(0))
+ .unwrap_or(0) as usize
+ }
+
+ pub fn set_state(&self, state: gstreamer::State) -> MResult<()> {
+ self.player.set_state(state)?;
+ // HACK: How to sync properly?
+ std::thread::sleep(std::time::Duration::from_millis(100));
+
+ Ok(())
+ }
+
+ pub fn pause(&self) -> MResult<()> {
+ self.set_state(gstreamer::State::Paused)?;
+
+ Ok(())
+ }
+
+ pub fn mute(&self) -> MResult<()> {
+ Ok(self.player.set_property("volume", &0.0)?)
+ }
+
+ pub fn unmute(&self) -> MResult<()> {
+ Ok(self.player.set_property("volume", &1.0)?)
+ }
+
+ pub fn get_state(&self) -> gstreamer::State {
+ let timeout = gstreamer::ClockTime::from_seconds(1);
+ let state = self.player.get_state(timeout);
+
+ state.1
+ }
+
+ pub fn start(&self, mute: bool) -> MResult<()> {
+ if mute {
+ self.mute()?;
+ }
+ self.play()
+ }
+
+ pub fn play(&self) -> MResult<()> {
+ self.set_state(gstreamer::State::Playing)
+ }
+
+ pub fn stop(&self) -> MResult<()> {
+ self.set_state(gstreamer::State::Ready)
+ }
+
+ pub fn seek_forward(&self) -> MResult<()> {
+ let seek_time = gstreamer::ClockTime::from_seconds(5);
+ if let Some(mut time) = self.player
+ .query_position::<gstreamer::ClockTime>() {
+ time += seek_time;
+
+ self.player.seek_simple(
+ gstreamer::SeekFlags::FLUSH,
+ gstreamer::format::GenericFormattedValue::Time(time)
+ )?;
+ }
+ Ok(())
+ }
+
+ pub fn seek_backward(&self) -> MResult<()> {
+ let seek_time = gstreamer::ClockTime::from_seconds(5);
+ if let Some(mut time) = self.player
+ .query_position::<gstreamer::ClockTime>() {
+ if time >= seek_time {
+ time -= seek_time;
+ } else {
+ time = gstreamer::ClockTime(Some(0));
+ }
+
+ self.player.seek_simple(
+ gstreamer::SeekFlags::FLUSH,
+ gstreamer::format::GenericFormattedValue::Time(time)
+ )?;
+ }
+ Ok(())
+ }
+}
+
+
+trait WithRaw {
+ fn with_raw(&self,
+ fun: impl FnOnce(&[u8]) -> MResult<()>)
+ -> MResult<()>;
+}
+
+#[cfg(feature = "video")]
+impl WithRaw for gstreamer::Sample {
+ fn with_raw(&self,
+ fun: impl FnOnce(&[u8]) -> MResult<()>)
+ -> MResult<()> {
+ let buffer = self.get_buffer()
+ .ok_or(format_err!("Couldn't get buffer from frame!"))?;
+
+ let map = buffer.map_readable()
+ .ok_or(format_err!("Couldn't get buffer from frame!"))?;
+
+ fun(map.as_slice())
+ }
+}
+
+// Mostly for plain old images, since they come from image::open
+impl WithRaw for RgbaImage {
+ fn with_raw(&self,
+ fun: impl FnOnce(&[u8]) -> MResult<()>)
+ -> MResult<()> {
+ let bytes = self.as_flat_samples();
+
+ fun(bytes.as_slice())
+ }
}
#[derive(PartialEq)]
enum RenderTarget {
Unicode,
+ #[cfg(feature = "sixel")]
Sixel,
Kitty
}
+impl RenderTarget {
+ fn send_image(&self,
+ img: &(impl WithRaw+ImgSize),
+ context: &Renderer) -> MResult<()> {
+ match self {
+ #[cfg(feature = "sixel")]
+ RenderTarget::Sixel => self.print_sixel(img)?,
+ RenderTarget::Unicode => self.print_unicode(img)?,
+ RenderTarget::Kitty => self.print_kitty(img, context)?
+ }
+ Ok(())
+ }
+
+ fn print_unicode(&self, img: &(impl WithRaw+ImgSize)) -> MResult<()> {
+ let (xsize, _) = img.size()?;
+
+ img.with_raw(move |raw| -> MResult<()> {
+ let lines = raw.chunks(4*xsize*2).map(|two_lines_colors| {
+ let (upper_line,lower_line) = two_lines_colors.split_at(4*xsize);
+ upper_line.chunks(4)
+ .zip(lower_line.chunks(4))
+ .map(|(upper, lower)| {
+ format!("{}{}▀{}",
+ Fg(Rgb(upper[0], upper[1], upper[2])),
+ Bg(Rgb(lower[0], lower[1], lower[2])),
+ termion::style::Reset
+ )
+ }).collect::<String>()
+ }).collect::<Vec<String>>();
+
+ for line in lines {
+ println!("{}", line);
+ }
+
+ println!("");
+
+ Ok(())
+ })
+ }
+
+ fn print_kitty(&self,
+ img: &(impl WithRaw+ImgSize),
+ context: &Renderer) -> MResult<()> {
+ let (w,h) = context.max_size(img);
+ let (img_x, img_y) = img.size()?;
+
+ img.with_raw(move |raw| -> MResult<()> {
+ let mut file = std::fs::File::create("/tmp/img.raw.new")?;
+ file.write_all(raw)?;
+ file.flush()?;
+ std::fs::rename("/tmp/img.raw.new", "/tmp/img.raw")?;
+
+ let path = base64::encode("/tmp/img.raw");
+
+ print!("\x1b_Ga=d\x1b\\");
+ println!("\x1b_Gf=32,s={},v={},c={},r={},a=T,t=f;{}\x1b\\",
+ img_x,
+ img_y,
+ w,
+ h,
+ path);
+ println!("");
+
+ Ok(())
+ })
+ }
+
+ #[cfg(feature = "sixel")]
+ fn print_sixel(&self, img: &(impl WithRaw+ImgSize)) -> MResult<()> {
+ use sixel_rs::encoder::{Encoder, QuickFrameBuilder};
+ use sixel_rs::optflags::EncodePolicy;
+
+ let (xpix, ypix) = img.size()?;
+
+ img.with_raw(move |raw| -> MResult<()> {
+ let sixfail = |e| format_err!("Sixel failed with: {:?}", e);
+ let encoder = Encoder::new()
+ .map_err(sixfail)?;;;
+
+ encoder.set_encode_policy(EncodePolicy::Fast)
+ .map_err(sixfail)?;;
+
+ let frame = QuickFrameBuilder::new()
+ .width(xpix)
+ .height(ypix)
+ .format(sixel_sys::PixelFormat::RGBA8888)
+ .pixels(raw.to_vec());
+
+ encoder.encode_bytes(frame)
+ .map_err(sixfail)?;
+
+ // No end of line printed by encoder
+ println!("");
+ println!("");
+
+ Ok(())
+ })
+ }
+}
+
struct Renderer {
- // encoder: RwLock<Encoder>,
target: RenderTarget,
xsize: usize,
ysize: usize,
- xsize_pix: usize,
- ysize_pix: usize,
+ xpix: usize,
+ ypix: usize,
+ cell_ratio: f32,
#[cfg(feature = "video")]
- last_frame: Option<DynamicImage>,
+ last_frame: Option<gstreamer::Sample>,
#[cfg(feature = "video")]
- position: Option<usize>,
+ position: usize,
#[cfg(feature = "video")]
- duration: Option<usize>
+ duration: usize,
}
impl Renderer {
fn new(target: RenderTarget,
xsize: usize,
ysize: usize,
- mut xsize_pix: usize,
- mut ysize_pix: usize) -> Renderer {
-
- if std::env::var("TERM") == Ok(String::from("xterm"))
- && target == RenderTarget::Sixel {
- // xterm has a hard limit on graphics size
- // maybe splitting the image into parts would work?
- if xsize_pix > 1000 { xsize_pix = 1000 };
- if ysize_pix > 1000 { ysize_pix = 1000 };
+ mut xpix: usize,
+ mut ypix: usize,
+ cell_ratio: f32) -> Renderer {
+
+ #[cfg(feature = "sixel")]
+ match std::env::var("TERM") {
+ Ok(term) => {
+ if term == "xterm" &&
+ target == RenderTarget::Sixel {
+ // xterm has a hard limit on graphics size
+ // maybe splitting the image into parts would work?
+ if xpix > 1000 { xpix = 1000 };
+ if ypix > 1000 { ypix = 1000 };
+ }
+ }
+ _ => {}
}
Renderer {
target,
xsize,
ysize,
- xsize_pix,
- ysize_pix,
+ xpix,
+ ypix,
+ cell_ratio,
#[cfg(feature = "video")]
last_frame: None,
#[cfg(feature = "video")]
- position: None,
+ position: 0,
#[cfg(feature = "video")]
- duration: None
+ duration: 0,
}
}
- // TODO: Add pixel size
#[cfg(feature = "video")]
- fn set_size(&mut self, xsize: usize, ysize: usize) -> MResult<()> {
+ fn set_widget_size(&mut self,
+ xsize: usize,
+ ysize: usize,
+ xpix: usize,
+ ypix: usize,
+ cell_ratio: f32) -> MResult<()> {
self.xsize = xsize;
self.ysize = ysize;
+ self.xpix = xpix;
+ self.ypix = ypix;
+ self.cell_ratio = cell_ratio;
- if let Some(ref frame) = self.last_frame {
- let pos = self.position.unwrap_or(0);
- let dur = self.duration.unwrap_or(0);
-
- // Use send_image, because send_frame takes SampleRef
- self.send_image(frame)?;
-
- let stdout = std::io::stdout();
- let mut stdout = stdout.lock();
-
- writeln!(stdout, "")?;
- writeln!(stdout, "{}", pos)?;
- writeln!(stdout, "{}", dur)?;
- }
- Ok(())
- }
-
- fn send_image(&self, image: &DynamicImage) -> MResult<()> {
- match self.target {
- RenderTarget::Sixel => self.print_sixel(image)?,
- RenderTarget::Unicode => self.print_unicode(image)?,
- RenderTarget::Kitty => self.print_kitty(image)?
- }
-
+ self.resend_scaled_frame()?;
Ok(())
}
#[cfg(feature = "video")]
- fn send_frame(&mut self,
- frame: &gstreamer::sample::SampleRef,
- position: u64,
- duration: u64)
- -> MResult<()> {
- let buffer = frame.get_buffer()
- .ok_or(format_err!("Couldn't get buffer from frame!"))?;
- let map = buffer.map_readable()
- .ok_or(format_err!("Couldn't get buffer from frame!"))?;
-
- let stdout = std::io::stdout();
- let mut stdout = stdout.lock();
-
- let img = image::load_from_memory_with_format(&map,
- image::ImageFormat::PNM)?;
- let (_, height) = self.max_size(&img);
-
- match self.target {
- RenderTarget::Sixel => self.print_sixel(&img)?,
- RenderTarget::Unicode => self.print_unicode(&img)?,
- RenderTarget::Kitty => self.print_kitty(&img)?
- }
+ fn send_media_meta(&self, frame: &(impl ImgSize)) -> MResult<()> {
+ let (_, height) = self.max_size(frame);
- self.last_frame = Some(img);
- self.position = Some(position as usize);
- self.duration = Some(duration as usize);
-
- // Empty line means end of frame
- writeln!(stdout, "")?;
-
- // Send size (in rows), position and duration
- writeln!(stdout, "{}", height)?;
- writeln!(stdout, "{}", position)?;
- writeln!(stdout, "{}", duration)?;
+ println!("{}", height+1);
+ println!("{}", self.position);
+ println!("{}", self.duration);
Ok(())
}
- pub fn render_image(&self, image: &DynamicImage) -> Vec<String> {
- use image::Pixel;
- let (xsize, ysize) = self.max_size(&image);
- // double height, because of half-height unicode
- let img = image.resize_exact(xsize as u32,
- ysize as u32 * 2,
- FilterType::Nearest).to_rgba();
- let rows = img.pixels()
- .collect::<Vec<_>>()
- .chunks(xsize as usize)
- .map(|line| line.to_vec())
- .collect::<Vec<Vec<_>>>();
-
- rows.chunks(2)
- .map(|rows| {
- rows[0]
- .iter()
- .zip(rows[1].iter())
- .map(|(upper, lower)| {
- let upper_color = upper.to_rgb();
- let lower_color = lower.to_rgb();
-
- format!("{}{}▀{}",
- Fg(Rgb(upper_color[0], upper_color[1], upper_color[2])),
- Bg(Rgb(lower_color[0], lower_color[1], lower_color[2])),
- termion::style::Reset
- )
- }).collect()
- }).collect()
- }
-
- fn print_unicode(&self, img: &DynamicImage) -> MResult<()> {
- let rendered_img = self.render_image(img);
- let stdout = std::io::stdout();
- let mut stdout = stdout.lock();
-
- for line in rendered_img {
- writeln!(stdout, "{}", line)?;
- }
+ fn send_image(&self, image: &(impl WithRaw+ImgSize)) -> MResult<()> {
+ self.target.send_image(image, &self)?;
Ok(())
}
- fn print_kitty(&self, img: &DynamicImage) -> MResult<()> {
- let w = img.width();
- let h = img.height();
-
- let (max_x, max_y) = self.max_size(img);
-
- let img = img.to_rgb().into_vec();
-
- let mut file = std::fs::File::create("/tmp/img.raw").unwrap();
- file.write_all(&img)?;