Fixed Metal warnings, made everything static for performance
parent cd045fde15
commit a068b7b09f
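In outline: every free helper function shared between the GLSL and Metal scaling shaders is now declared through a STATIC macro, which the GLSL master shader defines as empty and the Metal master shader defines as static. A minimal sketch of the pattern, assembled only from the hunks below (nothing here beyond what the diff itself shows):

    // GLSL master shader:   #define STATIC
    // Metal master shader:  #define STATIC static
    // Shared scaler helpers are then declared through the macro, e.g.:
    STATIC float quickDistance(vec4 a, vec4 b)
    {
        return abs(a.x - b.x) + abs(a.y - b.y) + abs(a.z - b.z);
    }

Independently, the unused corner samples (A, C, G, I) in the Scale2x-based shaders are commented out, presumably to silence unused-variable warnings on the Metal side.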
@@ -1,10 +1,10 @@
 
-float quickDistance(vec4 a, vec4 b)
+STATIC float quickDistance(vec4 a, vec4 b)
 {
     return abs(a.x - b.x) + abs(a.y - b.y) + abs(a.z - b.z);
 }
 
-vec4 omniScale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 omniScale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     vec2 pixel = position * input_resolution - vec2(0.5, 0.5);
 
@@ -104,7 +104,7 @@ vec4 omniScale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     return q22;
 }
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     vec2 pixel = vec2(1.0, 1.0) / output_resolution;
     // 4-pixel super sampling
@@ -1,4 +1,4 @@
-vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / input_resolution;
@@ -7,15 +7,15 @@ vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     // A B C
     // D E F
     // G H I
-    vec4 A = texture(image, position + vec2( -o.x, o.y));
+    // vec4 A = texture(image, position + vec2( -o.x, o.y));
     vec4 B = texture(image, position + vec2( 0, o.y));
-    vec4 C = texture(image, position + vec2( o.x, o.y));
+    // vec4 C = texture(image, position + vec2( o.x, o.y));
     vec4 D = texture(image, position + vec2( -o.x, 0));
     vec4 E = texture(image, position + vec2( 0, 0));
     vec4 F = texture(image, position + vec2( o.x, 0));
-    vec4 G = texture(image, position + vec2( -o.x, -o.y));
+    // vec4 G = texture(image, position + vec2( -o.x, -o.y));
     vec4 H = texture(image, position + vec2( 0, -o.y));
-    vec4 I = texture(image, position + vec2( o.x, -o.y));
+    // vec4 I = texture(image, position + vec2( o.x, -o.y));
     vec2 p = position * input_resolution;
     // p = the position within a pixel [0...1]
     p = fract(p);
@@ -38,7 +38,7 @@ vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     }
 }
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     return mix(texture(image, position), scale2x(image, position, input_resolution, output_resolution), 0.5);
 }
@@ -1,4 +1,4 @@
-vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / input_resolution;
@@ -6,15 +6,15 @@ vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     // A B C
     // D E F
     // G H I
-    vec4 A = texture(image, position + vec2( -o.x, o.y));
+    // vec4 A = texture(image, position + vec2( -o.x, o.y));
     vec4 B = texture(image, position + vec2( 0, o.y));
-    vec4 C = texture(image, position + vec2( o.x, o.y));
+    // vec4 C = texture(image, position + vec2( o.x, o.y));
     vec4 D = texture(image, position + vec2( -o.x, 0));
     vec4 E = texture(image, position + vec2( 0, 0));
     vec4 F = texture(image, position + vec2( o.x, 0));
-    vec4 G = texture(image, position + vec2( -o.x, -o.y));
+    // vec4 G = texture(image, position + vec2( -o.x, -o.y));
     vec4 H = texture(image, position + vec2( 0, -o.y));
-    vec4 I = texture(image, position + vec2( o.x, -o.y));
+    // vec4 I = texture(image, position + vec2( o.x, -o.y));
     vec2 p = position * input_resolution;
     // p = the position within a pixel [0...1]
     p = fract(p);
@@ -37,12 +37,12 @@ vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     }
 }
 
-vec4 aaScale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 aaScale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     return mix(texture(image, position), scale2x(image, position, input_resolution, output_resolution), 0.5);
 }
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / (input_resolution * 2.);
@@ -51,15 +51,15 @@ vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     // A B C
     // D E F
     // G H I
-    vec4 A = aaScale2x(image, position + vec2( -o.x, o.y), input_resolution, output_resolution);
+    // vec4 A = aaScale2x(image, position + vec2( -o.x, o.y), input_resolution, output_resolution);
     vec4 B = aaScale2x(image, position + vec2( 0, o.y), input_resolution, output_resolution);
-    vec4 C = aaScale2x(image, position + vec2( o.x, o.y), input_resolution, output_resolution);
+    // vec4 C = aaScale2x(image, position + vec2( o.x, o.y), input_resolution, output_resolution);
     vec4 D = aaScale2x(image, position + vec2( -o.x, 0), input_resolution, output_resolution);
     vec4 E = aaScale2x(image, position + vec2( 0, 0), input_resolution, output_resolution);
     vec4 F = aaScale2x(image, position + vec2( o.x, 0), input_resolution, output_resolution);
-    vec4 G = aaScale2x(image, position + vec2( -o.x, -o.y), input_resolution, output_resolution);
+    // vec4 G = aaScale2x(image, position + vec2( -o.x, -o.y), input_resolution, output_resolution);
     vec4 H = aaScale2x(image, position + vec2( 0, -o.y), input_resolution, output_resolution);
-    vec4 I = aaScale2x(image, position + vec2( o.x, -o.y), input_resolution, output_resolution);
+    // vec4 I = aaScale2x(image, position + vec2( o.x, -o.y), input_resolution, output_resolution);
     vec4 R;
     vec2 p = position * input_resolution * 2.;
     // p = the position within a pixel [0...1]
@@ -1,4 +1,4 @@
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     vec2 pixel = position * input_resolution - vec2(0.5, 0.5);
 
@@ -2,14 +2,14 @@
 
 /* The colorspace used by the HQnx filters is not really YUV, despite the algorithm description claims it is. It is
    also not normalized. Therefore, we shall call the colorspace used by HQnx "HQ Colorspace" to avoid confusion. */
-vec3 rgb_to_hq_colospace(vec4 rgb)
+STATIC vec3 rgb_to_hq_colospace(vec4 rgb)
 {
     return vec3( 0.250 * rgb.r + 0.250 * rgb.g + 0.250 * rgb.b,
                  0.250 * rgb.r - 0.000 * rgb.g - 0.250 * rgb.b,
                 -0.125 * rgb.r + 0.250 * rgb.g - 0.125 * rgb.b);
 }
 
-bool is_different(vec4 a, vec4 b)
+STATIC bool is_different(vec4 a, vec4 b)
 {
     vec3 diff = abs(rgb_to_hq_colospace(a) - rgb_to_hq_colospace(b));
     return diff.x > 0.188 || diff.y > 0.027 || diff.z > 0.031;
@@ -17,17 +17,17 @@ bool is_different(vec4 a, vec4 b)
 
 #define P(m, r) ((pattern & (m)) == (r))
 
-vec4 interp_2px(vec4 c1, float w1, vec4 c2, float w2)
+STATIC vec4 interp_2px(vec4 c1, float w1, vec4 c2, float w2)
 {
     return (c1 * w1 + c2 * w2) / (w1 + w2);
 }
 
-vec4 interp_3px(vec4 c1, float w1, vec4 c2, float w2, vec4 c3, float w3)
+STATIC vec4 interp_3px(vec4 c1, float w1, vec4 c2, float w2, vec4 c3, float w3)
 {
     return (c1 * w1 + c2 * w2 + c3 * w3) / (w1 + w2 + w3);
 }
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / input_resolution;
@@ -2,7 +2,7 @@
 #define COLOR_HIGH 1.0
 #define SCANLINE_DEPTH 0.1
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     vec2 pos = fract(position * input_resolution);
     vec2 sub_pos = fract(position * input_resolution * 6);
@@ -9,6 +9,7 @@ const vec2 input_resolution = vec2(160, 144);
 
 #define equal(x, y) ((x) == (y))
 #define inequal(x, y) ((x) != (y))
+#define STATIC
 
 out vec4 frag_color;
 
@@ -12,6 +12,7 @@ typedef float4 vec4;
 typedef texture2d<half> sampler2D;
 #define equal(x, y) all((x) == (y))
 #define inequal(x, y) any((x) != (y))
+#define STATIC static
 
 typedef struct {
     float4 position [[position]];
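The hunk above is the Metal side of the switch: STATIC expands to static, so each shared helper gets internal linkage in the generated Metal source. A plausible reading of the "Metal warnings" in the title is Clang's missing-prototype diagnostic for non-static free functions; a hedged illustration with a hypothetical helper name (not taken from this diff):

    // Hypothetical example: a free function with external linkage and no
    // prototype can trigger a missing-prototype warning when the Metal
    // source is compiled; declaring it static removes the warning and
    // leaves the compiler free to inline it.
    static float4 average2(float4 a, float4 b)
    {
        return (a + b) * 0.5f;
    }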
@@ -1,4 +1,4 @@
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     return texture(image, position);
 }
@@ -8,7 +8,7 @@
 */
 
 /* We use the same colorspace as the HQ algorithms. */
-vec3 rgb_to_hq_colospace(vec4 rgb)
+STATIC vec3 rgb_to_hq_colospace(vec4 rgb)
 {
     return vec3( 0.250 * rgb.r + 0.250 * rgb.g + 0.250 * rgb.b,
                  0.250 * rgb.r - 0.000 * rgb.g - 0.250 * rgb.b,
@@ -16,7 +16,7 @@ vec3 rgb_to_hq_colospace(vec4 rgb)
 }
 
 
-bool is_different(vec4 a, vec4 b)
+STATIC bool is_different(vec4 a, vec4 b)
 {
     vec3 diff = abs(rgb_to_hq_colospace(a) - rgb_to_hq_colospace(b));
     return diff.x > 0.125 || diff.y > 0.027 || diff.z > 0.031;
@@ -24,7 +24,7 @@ bool is_different(vec4 a, vec4 b)
 
 #define P(m, r) ((pattern & (m)) == (r))
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / input_resolution;
@@ -1,10 +1,9 @@
-
-float quickDistance(vec4 a, vec4 b)
+STATIC float quickDistance(vec4 a, vec4 b)
 {
     return abs(a.x - b.x) + abs(a.y - b.y) + abs(a.z - b.z);
 }
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     vec2 pixel = position * input_resolution - vec2(0.5, 0.5);
 
@@ -1,6 +1,6 @@
 /* Shader implementation of Scale2x is adapted from https://gist.github.com/singron/3161079 */
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / input_resolution;
@@ -9,15 +9,15 @@ vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     // A B C
     // D E F
     // G H I
-    vec4 A = texture(image, position + vec2( -o.x, o.y));
+    // vec4 A = texture(image, position + vec2( -o.x, o.y));
     vec4 B = texture(image, position + vec2( 0, o.y));
-    vec4 C = texture(image, position + vec2( o.x, o.y));
+    // vec4 C = texture(image, position + vec2( o.x, o.y));
     vec4 D = texture(image, position + vec2( -o.x, 0));
     vec4 E = texture(image, position + vec2( 0, 0));
     vec4 F = texture(image, position + vec2( o.x, 0));
-    vec4 G = texture(image, position + vec2( -o.x, -o.y));
+    // vec4 G = texture(image, position + vec2( -o.x, -o.y));
     vec4 H = texture(image, position + vec2( 0, -o.y));
-    vec4 I = texture(image, position + vec2( o.x, -o.y));
+    // vec4 I = texture(image, position + vec2( o.x, -o.y));
     vec2 p = position * input_resolution;
     // p = the position within a pixel [0...1]
     p = fract(p);
@@ -1,4 +1,4 @@
-vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / input_resolution;
@@ -6,15 +6,15 @@ vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     // A B C
     // D E F
     // G H I
-    vec4 A = texture(image, position + vec2( -o.x, o.y));
+    // vec4 A = texture(image, position + vec2( -o.x, o.y));
     vec4 B = texture(image, position + vec2( 0, o.y));
-    vec4 C = texture(image, position + vec2( o.x, o.y));
+    // vec4 C = texture(image, position + vec2( o.x, o.y));
     vec4 D = texture(image, position + vec2( -o.x, 0));
     vec4 E = texture(image, position + vec2( 0, 0));
     vec4 F = texture(image, position + vec2( o.x, 0));
-    vec4 G = texture(image, position + vec2( -o.x, -o.y));
+    // vec4 G = texture(image, position + vec2( -o.x, -o.y));
     vec4 H = texture(image, position + vec2( 0, -o.y));
-    vec4 I = texture(image, position + vec2( o.x, -o.y));
+    // vec4 I = texture(image, position + vec2( o.x, -o.y));
     vec2 p = position * input_resolution;
     // p = the position within a pixel [0...1]
     vec4 R;
@@ -38,7 +38,7 @@ vec4 scale2x(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     }
 }
 
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     // o = offset, the width of a pixel
     vec2 o = 1.0 / (input_resolution * 2.);
@@ -47,15 +47,15 @@ vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
     // A B C
     // D E F
     // G H I
-    vec4 A = scale2x(image, position + vec2( -o.x, o.y), input_resolution, output_resolution);
+    // vec4 A = scale2x(image, position + vec2( -o.x, o.y), input_resolution, output_resolution);
     vec4 B = scale2x(image, position + vec2( 0, o.y), input_resolution, output_resolution);
-    vec4 C = scale2x(image, position + vec2( o.x, o.y), input_resolution, output_resolution);
+    // vec4 C = scale2x(image, position + vec2( o.x, o.y), input_resolution, output_resolution);
     vec4 D = scale2x(image, position + vec2( -o.x, 0), input_resolution, output_resolution);
     vec4 E = scale2x(image, position + vec2( 0, 0), input_resolution, output_resolution);
     vec4 F = scale2x(image, position + vec2( o.x, 0), input_resolution, output_resolution);
-    vec4 G = scale2x(image, position + vec2( -o.x, -o.y), input_resolution, output_resolution);
+    // vec4 G = scale2x(image, position + vec2( -o.x, -o.y), input_resolution, output_resolution);
     vec4 H = scale2x(image, position + vec2( 0, -o.y), input_resolution, output_resolution);
-    vec4 I = scale2x(image, position + vec2( o.x, -o.y), input_resolution, output_resolution);
+    // vec4 I = scale2x(image, position + vec2( o.x, -o.y), input_resolution, output_resolution);
     vec2 p = position * input_resolution * 2.;
     // p = the position within a pixel [0...1]
     p = fract(p);
@@ -1,4 +1,4 @@
-vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
+STATIC vec4 scale(sampler2D image, vec2 position, vec2 input_resolution, vec2 output_resolution)
 {
     vec2 pixel = position * input_resolution - vec2(0.5, 0.5);
 