Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Blob image renderer #858

Merged
merged 1 commit into from Feb 20, 2017
Merged
Changes from all commits
Commits
File filter...
Filter file types
Jump to…
Jump to file
Failed to load files.

Always

Just for now

Add and integrate a blob image renderer trait.

  • Loading branch information
nical committed Feb 20, 2017
commit 457a25a7a08524bd618f463762f57cd43a90646f
@@ -12,14 +12,19 @@ extern crate webrender_traits;
use app_units::Au;
use euclid::Point2D;
use gleam::gl;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use webrender_traits::{ColorF, Epoch, GlyphInstance};
use std::sync::Arc;
use webrender_traits::{BlobImageResult, BlobImageError, BlobImageDescriptor};
use webrender_traits::{ColorF, Epoch, GlyphInstance, ClipRegion, ImageRendering};
use webrender_traits::{ImageDescriptor, ImageData, ImageFormat, PipelineId};
use webrender_traits::{ImageKey, BlobImageData, BlobImageRenderer, RasterizedBlobImage};
use webrender_traits::{LayoutSize, LayoutPoint, LayoutRect, LayoutTransform, DeviceUintSize};


fn load_file(name: &str) -> Vec<u8> {
let mut file = File::open(name).unwrap();
let mut buffer = vec![];
@@ -84,6 +89,7 @@ fn main() {
resource_override_path: res_path,
debug: true,
precache_shaders: true,
blob_image_renderer: Some(Box::new(FakeBlobImageRenderer::new())),
.. Default::default()
};

@@ -96,6 +102,17 @@ fn main() {
let epoch = Epoch(0);
let root_background_color = ColorF::new(0.3, 0.0, 0.0, 1.0);

let vector_img = api.add_image(
ImageDescriptor {
format: ImageFormat::RGBA8,
width: 100,
height: 100,
stride: None,
is_opaque: true,
},
ImageData::new_blob_image(Vec::new()),
);

let pipeline_id = PipelineId(0, 0);
let mut builder = webrender_traits::DisplayListBuilder::new(pipeline_id);

@@ -116,6 +133,14 @@ fn main() {
LayoutTransform::identity(),
webrender_traits::MixBlendMode::Normal,
Vec::new());
builder.push_image(
LayoutRect::new(LayoutPoint::new(0.0, 0.0), LayoutSize::new(100.0, 100.0)),
ClipRegion::simple(&bounds),
LayoutSize::new(100.0, 100.0),
LayoutSize::new(0.0, 0.0),
ImageRendering::Auto,
vector_img,
);

let sub_clip = {
let mask = webrender_traits::ImageMask {
@@ -255,3 +280,55 @@ fn main() {
}
}
}

/// Test-only blob image renderer used by the sample app: rasterizes every
/// requested blob into a checkerboard and stores the result until resolved.
struct FakeBlobImageRenderer {
    // Completed (or failed) rasterizations, keyed by image and consumed
    // when resolve_blob_image is called.
    images: HashMap<ImageKey, BlobImageResult>,
}

impl FakeBlobImageRenderer {
    /// Create a renderer with no pending results.
    fn new() -> Self {
        Self {
            images: HashMap::new(),
        }
    }
}

impl BlobImageRenderer for FakeBlobImageRenderer {
    /// Rasterize the blob image `key` into a checkerboard pattern matching
    /// `descriptor`'s dimensions and format, and stash the result (or an
    /// error for unsupported formats) for a later `resolve_blob_image` call.
    /// The serialized blob payload itself is ignored by this fake renderer.
    fn request_blob_image(&mut self, key: ImageKey, _: Arc<BlobImageData>, descriptor: &BlobImageDescriptor) {
        // Capacity assumes 4 bytes per pixel (RGBA8); for A8 this merely
        // over-reserves, which is harmless.
        let mut texels = Vec::with_capacity((descriptor.width * descriptor.height * 4) as usize);
        for y in 0..descriptor.height {
            for x in 0..descriptor.width {
                // Render a simple checkerboard pattern of 10x10 squares.
                let a = if (x % 20 >= 10) != (y % 20 >= 10) { 255 } else { 0 };
                match descriptor.format {
                    ImageFormat::RGBA8 => {
                        texels.push(a);
                        texels.push(a);
                        texels.push(a);
                        texels.push(255);
                    }
                    ImageFormat::A8 => {
                        texels.push(a);
                    }
                    _ => {
                        // Record the failure so resolve_blob_image reports it
                        // instead of returning InvalidKey.
                        // (Fixed typo: "Usupported" -> "Unsupported".)
                        self.images.insert(key,
                            Err(BlobImageError::Other(format!(
                                "Unsupported image format {:?}",
                                descriptor.format
                            )))
                        );
                        return;
                    }
                }
            }
        }

        self.images.insert(key, Ok(RasterizedBlobImage {
            data: texels,
            width: descriptor.width,
            height: descriptor.height,
        }));
    }

    /// Hand back the previously rasterized result for `key`, consuming it.
    /// Returns `InvalidKey` if no request was recorded for this key.
    fn resolve_blob_image(&mut self, key: ImageKey) -> BlobImageResult {
        self.images.remove(&key).unwrap_or(Err(BlobImageError::InvalidKey))
    }
}
@@ -21,7 +21,7 @@ use threadpool::ThreadPool;
use webrender_traits::{ApiMsg, AuxiliaryLists, BuiltDisplayList, IdNamespace, ImageData};
use webrender_traits::{PipelineId, RenderNotifier, RenderDispatcher, WebGLCommand, WebGLContextId};
use webrender_traits::channel::{PayloadHelperMethods, PayloadReceiver, PayloadSender, MsgReceiver};
use webrender_traits::{VRCompositorCommand, VRCompositorHandler};
use webrender_traits::{BlobImageRenderer, VRCompositorCommand, VRCompositorHandler};
use offscreen_gl_context::GLContextDispatcher;

/// The render backend is responsible for transforming high level display lists into
@@ -68,9 +68,10 @@ impl RenderBackend {
config: FrameBuilderConfig,
recorder: Option<Box<ApiRecordingReceiver>>,
main_thread_dispatcher: Arc<Mutex<Option<Box<RenderDispatcher>>>>,
blob_image_renderer: Option<Box<BlobImageRenderer>>,
vr_compositor_handler: Arc<Mutex<Option<Box<VRCompositorHandler>>>>) -> RenderBackend {

let resource_cache = ResourceCache::new(texture_cache, workers, enable_aa);
let resource_cache = ResourceCache::new(texture_cache, workers, blob_image_renderer, enable_aa);

register_thread_with_profiler("Backend".to_string());

@@ -48,7 +48,7 @@ use util::TransformedRectKind;
use webrender_traits::{ColorF, Epoch, PipelineId, RenderNotifier, RenderDispatcher};
use webrender_traits::{ExternalImageId, ImageData, ImageFormat, RenderApiSender, RendererKind};
use webrender_traits::{DeviceIntRect, DevicePoint, DeviceIntPoint, DeviceIntSize, DeviceUintSize};
use webrender_traits::ImageDescriptor;
use webrender_traits::{ImageDescriptor, BlobImageRenderer};
use webrender_traits::channel;
use webrender_traits::VRCompositorHandler;

@@ -780,6 +780,8 @@ impl Renderer {
// TODO(gw): Use a heuristic to select best # of worker threads.
Arc::new(Mutex::new(ThreadPool::new_with_name("WebRender:Worker".to_string(), 4)))
});

let blob_image_renderer = options.blob_image_renderer.take();
try!{ thread::Builder::new().name("RenderBackend".to_string()).spawn(move || {
let mut backend = RenderBackend::new(api_rx,
payload_rx,
@@ -794,6 +796,7 @@ impl Renderer {
config,
recorder,
backend_main_thread_dispatcher,
blob_image_renderer,
backend_vr_compositor);
backend.run();
})};
@@ -1756,7 +1759,6 @@ pub trait ExternalImageHandler {
fn release(&mut self, key: ExternalImageId);
}

#[derive(Debug)]
pub struct RendererOptions {
pub device_pixel_ratio: f32,
pub resource_override_path: Option<PathBuf>,
@@ -1771,6 +1773,7 @@ pub struct RendererOptions {
pub clear_color: ColorF,
pub render_target_debug: bool,
pub workers: Option<Arc<Mutex<ThreadPool>>>,
pub blob_image_renderer: Option<Box<BlobImageRenderer>>,
pub recorder: Option<Box<ApiRecordingReceiver>>,
}

@@ -1790,6 +1793,7 @@ impl Default for RendererOptions {
clear_color: ColorF::new(1.0, 1.0, 1.0, 1.0),
render_target_debug: false,
workers: None,
blob_image_renderer: None,
recorder: None,
}
}
@@ -24,6 +24,7 @@ use webrender_traits::{Epoch, FontKey, GlyphKey, ImageKey, ImageFormat, ImageRen
use webrender_traits::{FontRenderMode, ImageData, GlyphDimensions, WebGLContextId};
use webrender_traits::{DevicePoint, DeviceIntSize, ImageDescriptor, ColorF};
use webrender_traits::{ExternalImageId, GlyphOptions, GlyphInstance};
use webrender_traits::{BlobImageRenderer, BlobImageDescriptor, BlobImageError};
use threadpool::ThreadPool;
use euclid::Point2D;

@@ -209,11 +210,15 @@ pub struct ResourceCache {
glyph_cache_tx: Sender<GlyphCacheMsg>,
glyph_cache_result_queue: Receiver<GlyphCacheResultMsg>,
pending_external_image_update_list: ExternalImageUpdateList,

blob_image_renderer: Option<Box<BlobImageRenderer>>,
blob_image_requests: HashSet<ImageRequest>,
}

impl ResourceCache {
pub fn new(texture_cache: TextureCache,
workers: Arc<Mutex<ThreadPool>>,
blob_image_renderer: Option<Box<BlobImageRenderer>>,
enable_aa: bool) -> ResourceCache {
let (glyph_cache_tx, glyph_cache_result_queue) = spawn_glyph_cache_thread(workers);

@@ -232,6 +237,9 @@ impl ResourceCache {
glyph_cache_tx: glyph_cache_tx,
glyph_cache_result_queue: glyph_cache_result_queue,
pending_external_image_update_list: ExternalImageUpdateList::new(),

blob_image_renderer: blob_image_renderer,
blob_image_requests: HashSet::new(),
}
}

@@ -321,14 +329,38 @@ impl ResourceCache {
webgl_texture.size = size;
}

pub fn request_image(&mut self,
key: ImageKey,
rendering: ImageRendering) {
pub fn request_image(&mut self, key: ImageKey, rendering: ImageRendering) {
debug_assert!(self.state == State::AddResources);
self.pending_image_requests.push(ImageRequest {
let request = ImageRequest {
key: key,
rendering: rendering,
});
};

let template = self.image_templates.get(&key).unwrap();
if let ImageData::Vector(ref data) = template.data {
if let Some(ref mut renderer) = self.blob_image_renderer {
let same_epoch = match self.cached_images.resources.get(&request) {
Some(entry) => entry.epoch == template.epoch,
None => false,
};

if !same_epoch && self.blob_image_requests.insert(request) {
renderer.request_blob_image(
key,
data.clone(),
&BlobImageDescriptor {
width: template.descriptor.width,
height: template.descriptor.height,
format: template.descriptor.format,
// TODO(nical): figure out the scale factor (should change with zoom).
scale_factor: 1.0,
},
);
}
}
} else {
self.pending_image_requests.push(request);
}
}

pub fn request_glyphs(&mut self,
@@ -453,7 +485,7 @@ impl ResourceCache {
let external_id = match image_template.data {
ImageData::ExternalHandle(id) => Some(id),
// raw and externalBuffer are all use resource_cache.
ImageData::Raw(..) | ImageData::ExternalBuffer(..) => None,
ImageData::Raw(..) | ImageData::ExternalBuffer(..) | ImageData::Vector(..) => None,
};

ImageProperties {
@@ -539,51 +571,85 @@ impl ResourceCache {
}
}

for request in self.pending_image_requests.drain(..) {
let cached_images = &mut self.cached_images;
let image_template = &self.image_templates[&request.key];
let image_data = image_template.data.clone();
let mut image_requests = mem::replace(&mut self.pending_image_requests, Vec::new());
for request in image_requests.drain(..) {
self.finalize_image_request(request, None);
}

match image_template.data {
ImageData::ExternalHandle(..) => {
// external handle doesn't need to update the texture_cache.
let mut blob_image_requests = mem::replace(&mut self.blob_image_requests, HashSet::new());
if self.blob_image_renderer.is_some() {
for request in blob_image_requests.drain() {
match self.blob_image_renderer.as_mut().unwrap()
.resolve_blob_image(request.key) {
Ok(image) => {
self.finalize_image_request(request, Some(ImageData::new(image.data)));
}
// TODO(nical): I think that we should handle these somewhat gracefully,
// at least in the out-of-memory scenario.
Err(BlobImageError::Oom) => {
// This one should be recoverable-ish.
panic!("Failed to render a vector image (OOM)");
}
Err(BlobImageError::InvalidKey) => {
panic!("Invalid vector image key");
}
Err(BlobImageError::InvalidData) => {
// TODO(nical): If we run into this we should kill the content process.
panic!("Invalid vector image data");
}
Err(BlobImageError::Other(msg)) => {
panic!("Vector image error {}", msg);
}
}
ImageData::Raw(..) | ImageData::ExternalBuffer(..) => {
match cached_images.entry(request.clone(), self.current_frame_id) {
Occupied(entry) => {
let image_id = entry.get().texture_cache_id;

if entry.get().epoch != image_template.epoch {
self.texture_cache.update(image_id,
image_template.descriptor,
image_data);

// Update the cached epoch
*entry.into_mut() = CachedImageInfo {
texture_cache_id: image_id,
epoch: image_template.epoch,
};
}
}
Vacant(entry) => {
let image_id = self.texture_cache.new_item_id();
}
}
}

let filter = match request.rendering {
ImageRendering::Pixelated => TextureFilter::Nearest,
ImageRendering::Auto | ImageRendering::CrispEdges => TextureFilter::Linear,
};
fn finalize_image_request(&mut self, request: ImageRequest, image_data: Option<ImageData>) {
let image_template = &self.image_templates[&request.key];
let image_data = image_data.unwrap_or_else(||{
image_template.data.clone()
});

match image_template.data {
ImageData::ExternalHandle(..) => {
// external handle doesn't need to update the texture_cache.
}
ImageData::Raw(..) | ImageData::ExternalBuffer(..) | ImageData::Vector(..) => {
match self.cached_images.entry(request.clone(), self.current_frame_id) {
Occupied(entry) => {
let image_id = entry.get().texture_cache_id;

self.texture_cache.insert(image_id,
if entry.get().epoch != image_template.epoch {
self.texture_cache.update(image_id,
image_template.descriptor,
filter,
image_data);

entry.insert(CachedImageInfo {
// Update the cached epoch
*entry.into_mut() = CachedImageInfo {
texture_cache_id: image_id,
epoch: image_template.epoch,
});
};
}
}
Vacant(entry) => {
let image_id = self.texture_cache.new_item_id();

let filter = match request.rendering {
ImageRendering::Pixelated => TextureFilter::Nearest,
ImageRendering::Auto | ImageRendering::CrispEdges => TextureFilter::Linear,
};

self.texture_cache.insert(image_id,
image_template.descriptor,
filter,
image_data);

entry.insert(CachedImageInfo {
texture_cache_id: image_id,
epoch: image_template.epoch,
});
}
}
}
}
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.