Warping an image with a FabricJS filter and custom controls: the image resizes from its center when dragging a corner control point



In Fabric.js 4.3.0 I created a subclass that extends fabric.Image, which lets me change the render function so the image always fits its bounding box.

I also created a custom filter for Fabric with which, given 4 corner coordinates, I can warp the image, similar to Photoshop's Free Transform -> Distort tool.

While my code works, the problem is that when I drag a corner control the image always resizes from its center and the other control points move along with it.

I have tried to follow the instructions on resizing objects in Fabric with custom control points, which cover polygons and other shapes, but they do not produce the desired result for images.

The result I want to achieve is that when one of the green control points is dragged, the image warps, but the image itself and the other control points stay in place without moving, similar to what you can see here: https://youtu.be/Pn-9qFNM6Zg?t=274

Here is a JSFiddle with a demo: https://jsfiddle.net/human_a/p6d71skm/

fabric.textureSize = 4096;
// Set default filter backend
fabric.filterBackend = new fabric.WebglFilterBackend();
fabric.isWebglSupported(fabric.textureSize);
fabric.Image.filters.Perspective = class extends fabric.Image.filters.BaseFilter {
/**
* Constructor
* @param {Object} [options] Options object
*/
constructor(options) {
super();
if (options) this.setOptions(options);
this.applyPixelRatio();
}
type = 'Perspective';
pixelRatio = fabric.devicePixelRatio;
bounds = {width: 0, height: 0, minX: 0, maxX: 0, minY: 0, maxY: 0};
hasRelativeCoordinates = true;
/**
* Array of attributes to send with buffers. do not modify
* @private
*//** @ts-ignore */
vertexSource = `
precision mediump float;
attribute vec2 aPosition;
attribute vec2 aUvs;
uniform float uStepW;
uniform float uStepH;
varying vec2 vUvs;
vec2 uResolution;
void main() {
vUvs = aUvs;
uResolution = vec2(uStepW, uStepH);
gl_Position = vec4(uResolution * aPosition * 2.0 - 1.0, 0.0, 1.0);
}
`;
fragmentSource = `
precision mediump float;
varying vec2 vUvs;
uniform sampler2D uSampler;
void main() {
gl_FragColor = texture2D(uSampler, vUvs);
}
`;
/**
* Return a map of attribute names to WebGLAttributeLocation objects.
*
* @param {WebGLRenderingContext} gl The canvas context used to compile the shader program.
* @param {WebGLShaderProgram} program The shader program from which to take attribute locations.
* @returns {Object} A map of attribute names to attribute locations.
*/
getAttributeLocations(gl, program) {
return {
aPosition: gl.getAttribLocation(program, 'aPosition'),
aUvs: gl.getAttribLocation(program, 'aUvs'),
};
}
/**
* Send attribute data from this filter to its shader program on the GPU.
*
* @param {WebGLRenderingContext} gl The canvas context used to compile the shader program.
* @param {Object} attributeLocations A map of shader attribute names to their locations.
*/
sendAttributeData(gl, attributeLocations, data, type = 'aPosition') {
const attributeLocation = attributeLocations[type];
if (gl[type + 'vertexBuffer'] == null) {
gl[type + 'vertexBuffer'] = gl.createBuffer();
}
gl.bindBuffer(gl.ARRAY_BUFFER, gl[type+'vertexBuffer']);
gl.enableVertexAttribArray(attributeLocation);
gl.vertexAttribPointer(attributeLocation, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
}
generateSurface() {
const corners = this.perspectiveCoords;
const surface = verb.geom.NurbsSurface.byCorners(...corners);
const tess = surface.tessellate();
return tess;
}
/**
* Apply the resize filter to the image
* Determines whether to use WebGL or Canvas2D based on the options.webgl flag.
*
* @param {Object} options
* @param {Number} options.passes The number of filters remaining to be executed
* @param {Boolean} options.webgl Whether to use webgl to render the filter.
* @param {WebGLTexture} options.sourceTexture The texture setup as the source to be filtered.
* @param {WebGLTexture} options.targetTexture The texture where filtered output should be drawn.
* @param {WebGLRenderingContext} options.context The GL context used for rendering.
* @param {Object} options.programCache A map of compiled shader programs, keyed by filter type.
*/
applyTo(options) {
if (options.webgl) {
const { width, height } = this.getPerspectiveBounds();
options.context.canvas.width = width;
options.context.canvas.height = height;
options.destinationWidth = width;
options.destinationHeight = height;
this.hasRelativeCoordinates && this.calculateCoordsByCorners();
this._setupFrameBuffer(options);
this.applyToWebGL(options);
this._swapTextures(options);
}
}
applyPixelRatio(coords = this.perspectiveCoords) {
for(let i = 0; i < coords.length; i++) {
coords[i][0] *= this.pixelRatio;
coords[i][1] *= this.pixelRatio;
}
return coords;
}
getPerspectiveBounds(coords = this.perspectiveCoords) {
coords = this.perspectiveCoords.slice().map(c => (
{
x: c[0],
y: c[1],
}
));
this.bounds.minX = fabric.util.array.min(coords, 'x') || 0;
this.bounds.minY = fabric.util.array.min(coords, 'y') || 0;
this.bounds.maxX = fabric.util.array.max(coords, 'x') || 0;
this.bounds.maxY = fabric.util.array.max(coords, 'y') || 0;
this.bounds.width = Math.abs(this.bounds.maxX - this.bounds.minX);
this.bounds.height = Math.abs(this.bounds.maxY - this.bounds.minY);
return {
width:  this.bounds.width,
height: this.bounds.height,
minX:   this.bounds.minX,
maxX:   this.bounds.maxX,
minY:   this.bounds.minY,
maxY:   this.bounds.maxY,
};
}
/**
* @description coordinates are coming in relative to mockup item sections
* the following function normalizes the coords based on canvas corners
*
* @param {number[]} coords
*/
calculateCoordsByCorners(coords = this.perspectiveCoords) {
for(let i = 0; i < coords.length; i++) {
coords[i][0] -= this.bounds.minX;
coords[i][1] -= this.bounds.minY;
}
}
/**
* Apply this filter using webgl.
*
* @param {Object} options
* @param {Number} options.passes The number of filters remaining to be executed
* @param {Boolean} options.webgl Whether to use webgl to render the filter.
* @param {WebGLTexture} options.originalTexture The texture of the original input image.
* @param {WebGLTexture} options.sourceTexture The texture setup as the source to be filtered.
* @param {WebGLTexture} options.targetTexture The texture where filtered output should be drawn.
* @param {WebGLRenderingContext} options.context The GL context used for rendering.
* @param {Object} options.programCache A map of compiled shader programs, keyed by filter type.
*/
applyToWebGL(options) {
const gl = options.context;
const shader = this.retrieveShader(options);
const tess = this.generateSurface(options.sourceWidth, options.sourceHeight);
const indices = new Uint16Array(_.flatten(tess.faces));
// Clear the canvas first
this.clear(gl); // !important
// bind texture buffer
this.bindTexture(gl, options);
gl.useProgram(shader.program);
// create the buffer
this.indexBuffer(gl, indices);
this.sendAttributeData(gl, shader.attributeLocations, new Float32Array(_.flatten(tess.points)), 'aPosition');
this.sendAttributeData(gl, shader.attributeLocations, new Float32Array(_.flatten(tess.uvs)), 'aUvs');
gl.uniform1f(shader.uniformLocations.uStepW, 1 / gl.canvas.width);
gl.uniform1f(shader.uniformLocations.uStepH, 1 / gl.canvas.height);
this.sendUniformData(gl, shader.uniformLocations);
gl.viewport(0, 0, options.destinationWidth, options.destinationHeight);
// enable unsigned-int indices (values up to 4294967295) in WebGL 1.0
gl.getExtension('OES_element_index_uint');
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_SHORT, 0);
}
clear(gl) {
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
}
bindTexture(gl, options) {
if (options.pass === 0 && options.originalTexture) {
gl.bindTexture(gl.TEXTURE_2D, options.originalTexture);
} else {
gl.bindTexture(gl.TEXTURE_2D, options.sourceTexture);
}
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
indexBuffer(gl, data) {
const indexBuffer = gl.createBuffer();
// make this buffer the current 'ELEMENT_ARRAY_BUFFER'
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
// Fill the current element array buffer with data
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW);
}
};
/**
* Returns filter instance from an object representation
* @static
* @param {Object} object Object to create an instance from
* @param {function} [callback] to be invoked after filter creation
* @return {fabric.Image.filters.Perspective} Instance of fabric.Image.filters.Perspective
*/
fabric.Image.filters.Perspective.fromObject = fabric.Image.filters.BaseFilter.fromObject;

/**
* Photo subclass
* @class fabric.Photo
* @extends fabric.Image
* @return {fabric.Photo} thisArg
*
*/
fabric.Photo = class extends fabric.Image {
type = 'photo';
repeat = 'no-repeat';
fill = 'transparent';
initPerspective = true;
cacheProperties = fabric.Image.prototype.cacheProperties.concat('perspectiveCoords');
constructor(src, options) {
super(options);
if (options) this.setOptions(options);
this.on('added', () => {
const image = new Image();
image.setAttribute('crossorigin', 'anonymous');
image.onload = () => {
this._initElement(image, options);
this.width = image.width / 2;
this.height = image.height / 2;
this.loaded = true;
this.setCoords();
this.fire('image:loaded');
};
image.src = src;

this.on('image:loaded', () => {
!this.perspectiveCoords && this.getInitialPerspective();

this.togglePerspective();
this.canvas.requestRenderAll();
});
});
}
/**
* @private
* @param {CanvasRenderingContext2D} ctx Context to render on
*//** @ts-ignore */
_render(ctx) {
fabric.util.setImageSmoothing(ctx, this.imageSmoothing);
if (this.isMoving !== true && this.resizeFilter && this._needsResize()) {
this.applyResizeFilters();
}
this._stroke(ctx);
this._renderPaintInOrder(ctx);
}
/**
* @private
* @param {CanvasRenderingContext2D} ctx Context to render on
*//** @ts-ignore */
_renderFill(ctx) {
var elementToDraw = this._element;
if (!elementToDraw) return;
ctx.save();
const elWidth = elementToDraw.naturalWidth || elementToDraw.width;
const elHeight = elementToDraw.naturalHeight || elementToDraw.height;
const width = this.width;
const height = this.height;
ctx.translate(-width / 2, -height / 2);
// get the scale
const scale = Math.min(width / elWidth, height / elHeight);
// get the top left position of the image
const x = (width / 2) - (elWidth / 2) * scale;
const y = (height / 2) - (elHeight / 2) * scale;
ctx.drawImage(elementToDraw, x, y, elWidth * scale, elHeight * scale);
ctx.restore();
}
togglePerspective(mode = true) {
this.set('perspectiveMode', mode);
// this.set('hasBorders', !mode);
if (mode === true) {
this.set('layout', 'fit');
var lastControl = this.perspectiveCoords.length - 1;
this.controls = this.perspectiveCoords.reduce((acc, coord, index) => {
const anchorIndex = index > 0 ? index - 1 : lastControl;
let name = `prs${index + 1}`;
acc[name] = new fabric.Control({
name,
x: -0.5,
y: -0.5,
actionHandler: this._actionWrapper(anchorIndex, (_, transform, x, y) => {
const target = transform.target;
const localPoint = target.toLocalPoint(new fabric.Point(x, y), 'left', 'top');
coord[0] = localPoint.x / target.scaleX * fabric.devicePixelRatio;
coord[1] = localPoint.y / target.scaleY * fabric.devicePixelRatio;
target.setCoords();
target.applyFilters();
return true;
}),
positionHandler: function (dim, finalMatrix, fabricObject) {
const zoom = fabricObject.canvas.getZoom();
const scalarX = fabricObject.scaleX * zoom / fabric.devicePixelRatio;
const scalarY = fabricObject.scaleY * zoom / fabric.devicePixelRatio;
var point = fabric.util.transformPoint({
x: this.x * dim.x + this.offsetX + coord[0] * scalarX,
y: this.y * dim.y + this.offsetY + coord[1] * scalarY,
}, finalMatrix
);
return point;
},
cursorStyleHandler: () => 'cell',
render: function(ctx, left, top, _, fabricObject) {
const zoom = fabricObject.canvas.getZoom();
const scalarX = fabricObject.scaleX * zoom / fabric.devicePixelRatio;
const scalarY = fabricObject.scaleY * zoom / fabric.devicePixelRatio;
ctx.save();
ctx.translate(left, top);
ctx.rotate(fabric.util.degreesToRadians(fabricObject.angle));
ctx.beginPath();
ctx.moveTo(0, 0);
ctx.strokeStyle = 'green';
if (fabricObject.perspectiveCoords[index + 1]) {
ctx.strokeStyle = 'green';
ctx.lineTo(
(fabricObject.perspectiveCoords[index + 1][0] - coord[0]) * scalarX,
(fabricObject.perspectiveCoords[index + 1][1] - coord[1]) * scalarY,
);
} else {
ctx.lineTo(
(fabricObject.perspectiveCoords[0][0] - coord[0]) * scalarX,
(fabricObject.perspectiveCoords[0][1] - coord[1]) * scalarY,
);
}
ctx.stroke();
ctx.beginPath();
ctx.arc(0, 0, 4, 0, Math.PI * 2);
ctx.closePath();
ctx.fillStyle = 'green';
ctx.fill();
ctx.stroke();
ctx.restore();
},
offsetX: 0,
offsetY: 0,
actionName: 'perspective-coords',
});
return acc;
}, {});
} else {
this.controls = fabric.Photo.prototype.controls;
}
this.canvas.requestRenderAll();
}
_actionWrapper(anchorIndex, fn) {
return function(eventData, transform, x, y) {
if (!transform || !eventData) return;
const { target } = transform;
target._resetSizeAndPosition(anchorIndex);
const actionPerformed = fn(eventData, transform, x, y);
return actionPerformed;
};
}
/**
* @description manually reset the bounding box after points update
*
* @see http://fabricjs.com/custom-controls-polygon
* @param {number} index
*/
_resetSizeAndPosition = (index, apply = true) => {
const absolutePoint = fabric.util.transformPoint({
x: this.perspectiveCoords[index][0],
y: this.perspectiveCoords[index][1],
}, this.calcTransformMatrix());
this._setPositionDimensions({});
const penBaseSize = this._getNonTransformedDimensions();
const newX = (this.perspectiveCoords[index][0]) / penBaseSize.x;
const newY = (this.perspectiveCoords[index][1]) / penBaseSize.y;
this.setPositionByOrigin(absolutePoint, newX + 0.5, newY + 0.5);
apply && this._applyPointsOffset();
}
/**
* This is modified version of the internal fabric function
* this helps determine the size and the location of the path
*
* @param {object} options
*/
_setPositionDimensions(options) {
const { left, top, width, height } = this._calcDimensions(options);
this.width = width;
this.height = height;
var correctLeftTop = this.translateToGivenOrigin(
{
x: left,
y: top,
},
'left',
'top',
this.originX,
this.originY
);
if (typeof options.left === 'undefined') {
this.left = correctLeftTop.x;
}
if (typeof options.top === 'undefined') {
this.top = correctLeftTop.y;
}
this.pathOffset = {
x: left,
y: top,
};
return { left, top, width, height };
}
/**
* @description this is based on fabric.Path._calcDimensions
*
* @private
*/
_calcDimensions() {
const coords = this.perspectiveCoords.slice().map(c => (
{
x: c[0] / fabric.devicePixelRatio,
y: c[1] / fabric.devicePixelRatio,
}
));
const minX = fabric.util.array.min(coords, 'x') || 0;
const minY = fabric.util.array.min(coords, 'y') || 0;
const maxX = fabric.util.array.max(coords, 'x') || 0;
const maxY = fabric.util.array.max(coords, 'y') || 0;
const width = Math.abs(maxX - minX);
const height = Math.abs(maxY - minY);
return {
left: minX,
top: minY,
width: width,
height: height,
};
}
/**
* @description This is modified version of the internal fabric function
* this subtracts the path offset from each path points
*/
_applyPointsOffset() {
for (let i = 0; i < this.perspectiveCoords.length; i++) {
const coord = this.perspectiveCoords[i];
coord[0] -= this.pathOffset.x;
coord[1] -= this.pathOffset.y;
}
}
/**
* @description generate the initial coordinates for warping, based on image dimensions
*
*/
getInitialPerspective() {
let w = this.getScaledWidth();
let h = this.getScaledHeight();
const perspectiveCoords = [
[0, 0], // top left
[w, 0], // top right
[w, h], // bottom right
[0, h], // bottom left
];
this.perspectiveCoords = perspectiveCoords;
const perspectiveFilter = new fabric.Image.filters.Perspective({
hasRelativeCoordinates: false,
pixelRatio: fabric.devicePixelRatio, // the Photo is already retina ready
perspectiveCoords
});
this.filters.push(perspectiveFilter);
this.applyFilters();
return perspectiveCoords;
}
};
/**
* Creates an instance of fabric.Photo from its object representation
* @static
* @param {Object} object Object to create an instance from
* @param {Function} callback Callback to invoke when an image instance is created
*/
fabric.Photo.fromObject = function(_object, callback) {
const object = fabric.util.object.clone(_object);
object.layout = _object.layout;
fabric.util.loadImage(object.src, function(img, isError) {
if (isError) {
callback && callback(null, true);
return;
}
fabric.Photo.prototype._initFilters.call(object, object.filters, function(filters) {
object.filters = filters || [];
fabric.Photo.prototype._initFilters.call(object, [object.resizeFilter], function(resizeFilters) {
object.resizeFilter = resizeFilters[0];
fabric.util.enlivenObjects([object.clipPath], function(enlivedProps) {
object.clipPath = enlivedProps[0];
var image = new fabric.Photo(img, object);
callback(image, false);
});
});
});
}, null, object.crossOrigin || 'anonymous');
};

const canvas = new fabric.Canvas(document.getElementById('canvas'), {
backgroundColor: 'white',
enableRetinaScaling: true,
});
function resizeCanvas() {
canvas.setWidth(window.innerWidth);
canvas.setHeight(window.innerHeight);
}
resizeCanvas();
window.addEventListener('resize', () => resizeCanvas(), false);
const photo = new fabric.Photo('https://cdn.artboard.studio/private/5cb9c751-5f17-4062-adb7-6ec2c137a65d/user_uploads/5bafe170-1580-4d6b-a3be-f5cdce22d17d-asdasdasd.jpg', {
left: canvas.getWidth() / 2,
top: canvas.getHeight() / 2,
originX: 'center',
originY: 'center',
});
canvas.add(photo);
canvas.setActiveObject(photo);
body {
margin: 0;
}
<script src="https://cdn.jsdelivr.net/npm/lodash@4.17.20/lodash.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/verb-nurbs-web@2.1.3/build/js/verb.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/fabric@4.3.0/dist/fabric.min.js"></script>
<canvas id="canvas"></canvas>

I suspect that the reference to absolutePoint in _resetSizeAndPosition needs to take the image's origin into account, and that there is an elegant solution along those lines. However, I didn't find a good way to do that and resorted to manually "correcting" the issue inside _resetSizeAndPosition.

The modified version of _resetSizeAndPosition looks like this:

_resetSizeAndPosition = (index, apply = true) => {
const absolutePoint = fabric.util.transformPoint({
x: this.perspectiveCoords[index][0],
y: this.perspectiveCoords[index][1],
}, this.calcTransformMatrix());
let { height, width, left, top } = this._calcDimensions({});
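// widthDiff is half of the horizontal size change caused by moving the point;
// the sign of `left` tells us on which side of the image the change happened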
const widthDiff = (width - this.width) / 2;
if ((left < 0 && widthDiff > 0) || (left > 0 && widthDiff < 0)) {
absolutePoint.x -= widthDiff;
} else {
absolutePoint.x += widthDiff;
}
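// apply the same compensation vertically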
const heightDiff = (height - this.height) / 2;
if ((top < 0 && heightDiff > 0) || (top > 0 && heightDiff < 0)) {
absolutePoint.y -= heightDiff;
} else {
absolutePoint.y += heightDiff;
}
this._setPositionDimensions({});
const penBaseSize = this._getNonTransformedDimensions();
const newX = (this.perspectiveCoords[index][0]) / penBaseSize.x;
const newY = (this.perspectiveCoords[index][1]) / penBaseSize.y;
this.setPositionByOrigin(absolutePoint, newX + 0.5, newY + 0.5);
apply && this._applyPointsOffset();
}

The rationale behind this approach is that the object's left and top properties are never updated. In your example you can see this through the console by warping the image and then inspecting those properties on the image. So we need to apply a correction to the position properties based on the change in width and height. This ensures the other points stay fixed, because we compensate for the image's changing width and height through its position.

Comparing the value of width with this.width tells us whether the image grew or shrank. The value of left indicates whether the stretching happened on the left or the right side of the image. If the user stretches the image to the left or shrinks it from the right, we need to subtract the difference; otherwise we add it. Combining these conditions tells us how the image's position has to be adjusted to compensate. The same approach used for the horizontal values is applied to the vertical values as well.
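To make that add/subtract decision concrete, here is a minimal standalone sketch of just the correction step. The helper name, the argument shapes, and the sample numbers are mine and only mirror the logic used inside the modified _resetSizeAndPosition above:

function correctAbsolutePoint(absolutePoint, dims, previous) {
  // dims: { left, top, width, height } of the recalculated bounding box
  // previous: the object's width/height before the point was moved
  const widthDiff = (dims.width - previous.width) / 2;
  if ((dims.left < 0 && widthDiff > 0) || (dims.left > 0 && widthDiff < 0)) {
    absolutePoint.x -= widthDiff;
  } else {
    absolutePoint.x += widthDiff;
  }
  const heightDiff = (dims.height - previous.height) / 2;
  if ((dims.top < 0 && heightDiff > 0) || (dims.top > 0 && heightDiff < 0)) {
    absolutePoint.y -= heightDiff;
  } else {
    absolutePoint.y += heightDiff;
  }
  return absolutePoint;
}

// Dragging the top-left point 40px further to the left: the width goes from 300 to 340
// and left becomes -40, so widthDiff is 20 and the x position is corrected by -20,
// keeping the other points visually in place.
correctAbsolutePoint({ x: 150, y: 100 }, { left: -40, top: 0, width: 340, height: 200 }, { width: 300, height: 200 });
// -> { x: 130, y: 100 }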

JSFiddle: https://jsfiddle.net/0x8caow6/
