Changeset View
Standalone View
release/scripts/freestyle/modules/freestyle/shaders.py
| Context not available. | |||||
| ) | ) | ||||
| from freestyle.predicates import ( | from freestyle.predicates import ( | ||||
| pyVertexNatureUP0D, | pyVertexNatureUP0D, | ||||
| pyUEqualsUP0D, | |||||
| ) | ) | ||||
| from freestyle.utils import ( | |||||
| iter_triplet, | |||||
| curvature, | |||||
| ) | |||||
| from freestyle.utils import ContextFunctions as CF | from freestyle.utils import ContextFunctions as CF | ||||
| from math import atan, cos, pi, pow, sin, sinh, sqrt | import bpy | ||||
| from mathutils import Vector | import random | ||||
| from random import randint | import time | ||||
kjym3: Please consider keeping the original order of classes to make code review easier. | |||||
| from math import atan, cos, pi, sin, sinh, sqrt | |||||
| from mathutils import Vector, Color | |||||
| from random import randint | |||||
| ## thickness modifiers | |||||
| ###################### | |||||
| class pyDepthDiscontinuityThicknessShader(StrokeShader): | |||||
| """ | |||||
| Assigns a thickness to the stroke based on the stroke's distance | |||||
| to the camera (Z-value) | |||||
| """ | |||||
| def __init__(self, min, max): | |||||
| StrokeShader.__init__(self) | |||||
| self.__min = float(min) | |||||
| self.__max = float(max) | |||||
| self.__func = ZDiscontinuityF0D() | |||||
| def shade(self, stroke): | |||||
| z_min=0.0 | |||||
| z_max=1.0 | """-- Thickness Stroke Shaders --""" | ||||
| a = (self.__max - self.__min)/(z_max-z_min) | |||||
| b = (self.__min*z_max-self.__max*z_min)/(z_max-z_min) | |||||
| it = stroke.stroke_vertices_begin() | |||||
| while not it.is_end: | |||||
| z = self.__func(Interface0DIterator(it)) | |||||
| thickness = a*z+b | |||||
| it.object.attribute.thickness = (thickness, thickness) | |||||
| it.increment() | |||||
| class pyConstantThicknessShader(StrokeShader): | class pyConstantThicknessShader(StrokeShader): | ||||
| Context not available. | |||||
| """ | """ | ||||
| def __init__(self, thickness): | def __init__(self, thickness): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._thickness = thickness | self._thickness = thickness / 2.0 | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | for svert in stroke: | ||||
| while not it.is_end: | svert.attribute.thickness = (self._thickness, self._thickness) | ||||
| t = self._thickness/2.0 | |||||
| it.object.attribute.thickness = (t, t) | class pyDepthDiscontinuityThicknessShader(StrokeShader): | ||||
| it.increment() | """ | ||||
| Assigns a thickness to the stroke based on the stroke's distance | |||||
| to the camera (Z-value) | |||||
| """ | |||||
| def __init__(self, min, max): | |||||
| StrokeShader.__init__(self) | |||||
| self.a = float(max - min) | |||||
| self.b = float(min) | |||||
| self.func = ZDiscontinuityF0D() | |||||
| def shade(self, stroke): | |||||
| it = Interface0DIterator(iter(stroke)) | |||||
| for svert, inter in zip(stroke, it): | |||||
| z = self.func(it) | |||||
| thickness = self.a * z + self.b | |||||
| svert.attribute.thickness = (thickness, thickness) | |||||
| class pyFXSVaryingThicknessWithDensityShader(StrokeShader): | class pyFXSVaryingThicknessWithDensityShader(StrokeShader): | ||||
| Context not available. | |||||
| """ | """ | ||||
| def __init__(self, wsize, threshold_min, threshold_max, thicknessMin, thicknessMax): | def __init__(self, wsize, threshold_min, threshold_max, thicknessMin, thicknessMax): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self.wsize= wsize | self._func = DensityF0D(wsize) | ||||
| self.threshold_min= threshold_min | self.threshold_min = threshold_min | ||||
| self.threshold_max= threshold_max | self.threshold_max = threshold_max | ||||
| self._thicknessMin = thicknessMin | self._thicknessMin = thicknessMin | ||||
| self._thicknessMax = thicknessMax | self._thicknessMax = thicknessMax | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| n = stroke.stroke_vertices_size() | it = Interface0DIterator(iter(stroke)) | ||||
| i = 0 | delta_threshold = self.threshold_max - self.threshold_min | ||||
| it = stroke.stroke_vertices_begin() | delta_thickness = self._thicknessMax - self._thicknessMin | ||||
| func = DensityF0D(self.wsize) | |||||
| while not it.is_end: | for svert, inter in zip(stroke, it): | ||||
| c = func(Interface0DIterator(it)) | c = self._func(it) | ||||
| if c < self.threshold_min: | # check if c is within the threshold, adjust otherwise | ||||
| c = self.threshold_min | c = min(max(c, self.threshold_min), self.threshold_max) | ||||
| if c > self.threshold_max: | t = (self.threshold_max - c) / delta_threshold * delta_thickness + self._thicknessMin | ||||
| c = self.threshold_max | svert.attribute.thickness = (t/2.0, t/2.0) | ||||
| ## t = (c - self.threshold_min)/(self.threshold_max - self.threshold_min)*(self._thicknessMax-self._thicknessMin) + self._thicknessMin | |||||
| t = (self.threshold_max - c )/(self.threshold_max - self.threshold_min)*(self._thicknessMax-self._thicknessMin) + self._thicknessMin | |||||
| it.object.attribute.thickness = (t/2.0, t/2.0) | |||||
| i = i+1 | |||||
| it.increment() | |||||
| class pyIncreasingThicknessShader(StrokeShader): | class pyIncreasingThicknessShader(StrokeShader): | ||||
Not Done Inline ActionsCasting to float is not necessary in Python 3. An integer divided by another gives a float value. kjym3: Casting to float is not necessary in Python 3. An integer divided by another gives a float… | |||||
| Context not available. | |||||
| self._thicknessMax = thicknessMax | self._thicknessMax = thicknessMax | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| n = stroke.stroke_vertices_size() | n = len(stroke) | ||||
| i = 0 | for i, svert in enumerate(stroke): | ||||
| it = stroke.stroke_vertices_begin() | c = float(i) / n | ||||
Not Done Inline Actionsc = i / n kjym3: c = i / n | |||||
| while not it.is_end: | if i < (n * 0.5): | ||||
| c = float(i)/float(n) | t = (1.0 - c) * self._thicknessMin + c * self._thicknessMax | ||||
| if i < float(n)/2.0: | |||||
| t = (1.0 - c)*self._thicknessMin + c * self._thicknessMax | |||||
| else: | else: | ||||
| t = (1.0 - c)*self._thicknessMax + c * self._thicknessMin | t = (1.0 - c) * self._thicknessMax + c * self._thicknessMin | ||||
| it.object.attribute.thickness = (t/2.0, t/2.0) | svert.attribute.thickness = (t/2.0, t/2.0) | ||||
| i = i+1 | |||||
| it.increment() | |||||
| class pyConstrainedIncreasingThicknessShader(StrokeShader): | class pyConstrainedIncreasingThicknessShader(StrokeShader): | ||||
| Context not available. | |||||
| self._ratio = ratio | self._ratio = ratio | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| slength = stroke.length_2d | n = len(stroke) | ||||
| tmp = self._ratio*slength | maxT = min(self._ratio * stroke.length_2d, self._thicknessMax) | ||||
| maxT = 0.0 | |||||
| if tmp < self._thicknessMax: | for i, svert in enumerate(stroke): | ||||
| maxT = tmp | c = float(i) / n | ||||
Not Done Inline Actionsc = i / n kjym3: c = i / n | |||||
| else: | if i < (n * 0.5): | ||||
| maxT = self._thicknessMax | t = (1.0 - c) * self._thicknessMin + c * maxT | ||||
| n = stroke.stroke_vertices_size() | |||||
| i = 0 | |||||
| it = stroke.stroke_vertices_begin() | |||||
| while not it.is_end: | |||||
| att = it.object.attribute | |||||
| c = float(i)/float(n) | |||||
| if i < float(n)/2.0: | |||||
| t = (1.0 - c)*self._thicknessMin + c * maxT | |||||
| else: | else: | ||||
| t = (1.0 - c)*maxT + c * self._thicknessMin | t = (1.0 - c) * maxT + c * self._thicknessMin | ||||
| att.thickness = (t/2.0, t/2.0) | |||||
| if i == n-1: | if i == (n - 1): | ||||
| att.thickness = (self._thicknessMin/2.0, self._thicknessMin/2.0) | svert.attribute.thickness = (self._thicknessMin/2.0, self._thicknessMin/2.0) | ||||
| i = i+1 | else: | ||||
| it.increment() | svert.attribute.thickness = (t/2.0, t/2.0) | ||||
| class pyDecreasingThicknessShader(StrokeShader): | class pyDecreasingThicknessShader(StrokeShader): | ||||
| Context not available. | |||||
Not Done Inline ActionsJust a thought: This shader relies on vertex index to determine new stroke thickness. That would work fine if the vertex intervals are more or less the same; otherwise nonlinear thickness variation is expected. We might want to rely on the u parameter of the stroke instead, to ensure smooth thickness changes. kjym3: Just a thought: This shader relies on vertex index to determine new stroke thickness. That… | |||||
Not Done Inline ActionsThought about this as well. Speed-wise, it should barely matter (it may even be faster, albeit minimally). It would however make regression testing inaccurate. I propose to stick to the old approach for now and verify that all works well. Then we can implement the use of the u parameter. flokkievids: Thought about this as well. Speed-wise, it should barely matter (it may even be faster, albeit… | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| l = stroke.length_2d | l = stroke.length_2d | ||||
| tMax = self._thicknessMax | n = len(stroke) | ||||
| if self._thicknessMax > 0.33*l: | tMax = min(self._thicknessMax, 0.33 * l) | ||||
| tMax = 0.33*l | tMin = min(self._thicknessMin, 0.10 * l) | ||||
| tMin = self._thicknessMin | |||||
| if self._thicknessMin > 0.1*l: | for i, svert in enumerate(stroke): | ||||
| tMin = 0.1*l | c = i / n | ||||
| n = stroke.stroke_vertices_size() | t = (1.0 - c) * tMax + c * tMin | ||||
| i = 0 | svert.attribute.thickness = (t/2.0, t/2.0) | ||||
| it = stroke.stroke_vertices_begin() | |||||
| while not it.is_end: | |||||
| c = float(i)/float(n) | |||||
| t = (1.0 - c)*tMax +c*tMin | |||||
| it.object.attribute.thickness = (t/2.0, t/2.0) | |||||
| i = i+1 | |||||
| it.increment() | |||||
| class pyNonLinearVaryingThicknessShader(StrokeShader): | class pyNonLinearVaryingThicknessShader(StrokeShader): | ||||
| Context not available. | |||||
| Assigns thickness to a stroke based on an exponential function | Assigns thickness to a stroke based on an exponential function | ||||
| """ | """ | ||||
| def __init__(self, thicknessExtremity, thicknessMiddle, exponent): | def __init__(self, thicknessExtremity, thicknessMiddle, exponent): | ||||
Not Done Inline ActionsRedundant casting to float: c should already be a float. kjym3: Redundant casting to float: `c` should already be a float. | |||||
| StrokeShader.__init__(self) | |||||
| self._thicknessMin = thicknessMiddle | self._thicknessMin = thicknessMiddle | ||||
| self._thicknessMax = thicknessExtremity | self._thicknessMax = thicknessExtremity | ||||
| self._exponent = exponent | self._exp = exponent | ||||
| StrokeShader.__init__(self) | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| n = stroke.stroke_vertices_size() | n = len(stroke) | ||||
| i = 0 | for i, svert in enumerate(stroke): | ||||
| it = stroke.stroke_vertices_begin() | c = (i / n) if (i < n / 2.0) else ((n - i) / n) | ||||
| while not it.is_end: | c = pow(c, self._exp) * pow(2.0, self._exp) | ||||
| if i < float(n)/2.0: | t = (1.0 - c) * self._thicknessMax + c * self._thicknessMin | ||||
| c = float(i)/float(n) | svert.attribute.thickness = (t/2.0, t/2.0) | ||||
| else: | |||||
| c = float(n-i)/float(n) | |||||
| c = self.smoothC(c, self._exponent) | |||||
| t = (1.0 - c)*self._thicknessMax + c * self._thicknessMin | |||||
| it.object.attribute.thickness = (t/2.0, t/2.0) | |||||
| i = i+1 | |||||
| it.increment() | |||||
| def smoothC(self, a, exp): | |||||
| return pow(float(a), exp) * pow(2.0, exp) | |||||
| class pySLERPThicknessShader(StrokeShader): | class pySLERPThicknessShader(StrokeShader): | ||||
| """ | """ | ||||
| Context not available. | |||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._thicknessMin = thicknessMin | self._thicknessMin = thicknessMin | ||||
| self._thicknessMax = thicknessMax | self._thicknessMax = thicknessMax | ||||
| self._omega = omega | self.omega = omega | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| slength = stroke.length_2d | n = len(stroke) | ||||
| tmp = 0.33*slength | maxT = min(self._thicknessMax, 0.33 * stroke.length_2d) | ||||
| maxT = self._thicknessMax | omega = self.omega | ||||
| if tmp < self._thicknessMax: | for i, svert in enumerate(stroke): | ||||
| maxT = tmp | c = i / n | ||||
| n = stroke.stroke_vertices_size() | if i < (n * 0.5): | ||||
| i = 0 | t = sin((1-c) * omega) / sinh(omega) * self._thicknessMin + sin(c * omega) / sinh(omega) * maxT | ||||
| it = stroke.stroke_vertices_begin() | |||||
| while not it.is_end: | |||||
| c = float(i)/float(n) | |||||
| if i < float(n)/2.0: | |||||
| t = sin((1-c)*self._omega)/sinh(self._omega)*self._thicknessMin + sin(c*self._omega)/sinh(self._omega) * maxT | |||||
| else: | else: | ||||
| t = sin((1-c)*self._omega)/sinh(self._omega)*maxT + sin(c*self._omega)/sinh(self._omega) * self._thicknessMin | t = sin((1-c) * omega) / sinh(omega) * maxT + sin(c * omega) / sinh(omega) * self._thicknessMin | ||||
| it.object.attribute.thickness = (t/2.0, t/2.0) | svert.attribute.thickness = (t/2.0, t/2.0) | ||||
| i = i+1 | |||||
| it.increment() | |||||
| ## needs some love | |||||
| class pyTVertexThickenerShader(StrokeShader): ## FIXME | class pyTVertexThickenerShader(StrokeShader): ## FIXME | ||||
| """ | """ | ||||
| Thickens TVertices (visual intersections between two edges) | Thickens TVertices (visual intersections between two edges) | ||||
| Context not available. | |||||
| self._n = n | self._n = n | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | it = iter(stroke) | ||||
| predTVertex = pyVertexNatureUP0D(Nature.T_VERTEX) | n = self._n | ||||
| while not it.is_end: | a = self._a | ||||
| if predTVertex(it) == 1: | for svert in it: | ||||
| it2 = StrokeVertexIterator(it) | if (svert.nature & Nature.T_VERTEX): | ||||
| it2.increment() | it2 = it.incremented(); | ||||
| if not (it.is_begin or it2.is_end): | if not (it.is_begin or it2.is_end): | ||||
| it.increment() | it.increment() | ||||
| continue | continue | ||||
| n = self._n | |||||
| a = self._a | |||||
| if it.is_begin: | if it.is_begin: | ||||
| it3 = StrokeVertexIterator(it) | for count, svert in zip(range(n), StrokeVertexIterator(it)): | ||||
| count = 0 | |||||
| while (not it3.is_end) and count < n: | |||||
| att = it3.object.attribute | |||||
| (tr, tl) = att.thickness | |||||
| r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 | r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 | ||||
| #r = (1.0-a)/float(n-1)*count + a | (tr, tl) = svert.attribute.thickness | ||||
| att.thickness = (r*tr, r*tl) | svert.attribute.thickness = (r * tr, r * tl) | ||||
| it3.increment() | |||||
| count = count + 1 | |||||
| if it2.is_end: | if it2.is_end: | ||||
| it4 = StrokeVertexIterator(it) | for count, svert in zip(range(n), it.reversed()): | ||||
| count = 0 | |||||
| while (not it4.is_begin) and count < n: | |||||
| att = it4.object.attribute | |||||
| (tr, tl) = att.thickness | |||||
| r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 | |||||
| #r = (1.0-a)/float(n-1)*count + a | |||||
| att.thickness = (r*tr, r*tl) | |||||
| it4.decrement() | |||||
| count = count + 1 | |||||
| if it4.is_begin: | |||||
| att = it4.object.attribute | |||||
| (tr, tl) = att.thickness | |||||
| r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 | r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 | ||||
| #r = (1.0-a)/float(n-1)*count + a | (tr, tl) = svert.attribute.thickness | ||||
| att.thickness = (r*tr, r*tl) | svert.attribute.thickness = (r * tr, r * tl) | ||||
| it.increment() | |||||
| class pyImportance2DThicknessShader(StrokeShader): | class pyImportance2DThicknessShader(StrokeShader): | ||||
| Context not available. | |||||
| """ | """ | ||||
| def __init__(self, x, y, w, kmin, kmax): | def __init__(self, x, y, w, kmin, kmax): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._x = x | self._origin = Vector((x, y)) | ||||
| self._y = y | |||||
| self._w = float(w) | self._w = float(w) | ||||
| self._kmin = float(kmin) | self._kmin = float(kmin) | ||||
| self._kmax = float(kmax) | self._kmax = float(kmax) | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| origin = Vector((self._x, self._y)) | for svert in stroke: | ||||
| it = stroke.stroke_vertices_begin() | d = (svert.point_2d - self._origin).length | ||||
| while not it.is_end: | k = (self._kmin if (d > self.w) else | ||||
| v = it.object | (self._kmax * (self._w-d) + self._kmin * d) / self._w) | ||||
| d = (v.point_2d - self._origin).length | |||||
| if d > self._w: | (tr, tl) = svert.attribute.thickness | ||||
| k = self._kmin | svert.attribute.thickness = (k*tr/2.0, k*tl/2.0) | ||||
| else: | |||||
| k = (self._kmax*(self._w-d) + self._kmin*d)/self._w | |||||
| att = v.attribute | |||||
| (tr, tl) = att.thickness | |||||
| att.thickness = (k*tr/2.0, k*tl/2.0) | |||||
| it.increment() | |||||
| class pyImportance3DThicknessShader(StrokeShader): | class pyImportance3DThicknessShader(StrokeShader): | ||||
| Context not available. | |||||
| """ | """ | ||||
| def __init__(self, x, y, z, w, kmin, kmax): | def __init__(self, x, y, z, w, kmin, kmax): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._x = x | self._origin = Vector((x, y, z)) | ||||
| self._y = y | |||||
| self._z = z | |||||
| self._w = float(w) | self._w = float(w) | ||||
| self._kmin = float(kmin) | self._kmin = float(kmin) | ||||
| self._kmax = float(kmax) | self._kmax = float(kmax) | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| origin = Vector((self._x, self._y, self._z)) | for svert in stroke: | ||||
| it = stroke.stroke_vertices_begin() | d = (svert.point_3d - self._origin).length | ||||
| while not it.is_end: | k = (self._kmin if (d > self._w) else | ||||
| v = it.object | (self._kmax * (self._w-d) + self._kmin * d) / self._w) | ||||
| p = v.point_3d | |||||
| d = (p-origin).length | |||||
| if d > self._w: | |||||
| k = self._kmin | |||||
| else: | |||||
| k = (self._kmax*(self._w-d) + self._kmin*d)/self._w | |||||
| att = v.attribute | |||||
| (tr, tl) = att.thickness | |||||
| att.thickness = (k*tr/2.0, k*tl/2.0) | |||||
| it.increment() | |||||
| (tr, tl) = svert.attribute.thickness | |||||
| svert.attribute.thickness = (k*tr/2.0, k*tl/2.0) | |||||
| class pyZDependingThicknessShader(StrokeShader): | class pyZDependingThicknessShader(StrokeShader): | ||||
| """ | """ | ||||
| Context not available. | |||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self.__min = min | self.__min = min | ||||
| self.__max = max | self.__max = max | ||||
| self.__func = GetProjectedZF0D() | self.func = GetProjectedZF0D() | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | it = Interface0DIterator(iter(stroke)) | ||||
| z_min = 1 | z_indices = tuple(self.func(it) for obj in it) | ||||
| z_max = 0 | z_min, z_max = min(min(z_indices), 1), max(max(z_indices), 0) | ||||
Not Done Inline ActionsNow this can be it = Interface0DIterator(stroke). kjym3: Now this can be `it = Interface0DIterator(stroke)`. | |||||
| while not it.is_end: | |||||
| z = self.__func(Interface0DIterator(it)) | |||||
| if z < z_min: | |||||
| z_min = z | |||||
| if z > z_max: | |||||
| z_max = z | |||||
| it.increment() | |||||
| z_diff = 1 / (z_max - z_min) | z_diff = 1 / (z_max - z_min) | ||||
| it = stroke.stroke_vertices_begin() | |||||
| while not it.is_end: | for svert, z_index in zip(stroke, z_indices): | ||||
| z = (self.__func(Interface0DIterator(it)) - z_min) * z_diff | z = (z_index - z_min) * z_diff | ||||
| thickness = (1 - z) * self.__max + z * self.__min | thickness = (1 - z) * self.__max + z * self.__min | ||||
| it.object.attribute.thickness = (thickness, thickness) | svert.attribute.thickness = (thickness, thickness) | ||||
| it.increment() | |||||
| ## color modifiers | |||||
| ################## | """-- Color & Alpha Stroke Shaders --""" | ||||
| class pyConstantColorShader(StrokeShader): | class pyConstantColorShader(StrokeShader): | ||||
| """ | """ | ||||
| Assigns a constant color to the stroke | Assigns a constant color to the stroke | ||||
| """ | """ | ||||
| def __init__(self, r, g, b, a=1): | def __init__(self,r,g,b, a = 1): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._r = r | self._color = (r, g, b) | ||||
| self._g = g | |||||
| self._b = b | |||||
| self._a = a | self._a = a | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | for svert in stroke: | ||||
| while not it.is_end: | svert.attribute.color = self._color | ||||
| att = it.object.attribute | svert.attribute.alpha = self._a | ||||
| att.color = (self._r, self._g, self._b) | |||||
| att.alpha = self._a | |||||
| it.increment() | |||||
| class pyIncreasingColorShader(StrokeShader): | class pyIncreasingColorShader(StrokeShader): | ||||
| Context not available. | |||||
| """ | """ | ||||
| def __init__(self,r1,g1,b1,a1, r2,g2,b2,a2): | def __init__(self,r1,g1,b1,a1, r2,g2,b2,a2): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._c1 = [r1,g1,b1,a1] | # use 4d vector to simplify math | ||||
| self._c2 = [r2,g2,b2,a2] | self._c1 = Vector((r1,g1,b1,a1)) | ||||
| self._c2 = Vector((r2,g2,b2,a2)) | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| n = stroke.stroke_vertices_size() - 1 | n = len(stroke) - 1 | ||||
| inc = 0 | |||||
| it = stroke.stroke_vertices_begin() | for i, svert in enumerate(stroke): | ||||
| while not it.is_end: | c = i / n | ||||
| att = it.object.attribute | color = (1 - c) * self._c1 + c * self._c2 | ||||
| c = float(inc) / float(n) | ## adjust this later | ||||
| svert.attribute.color = color[:3] | |||||
| att.color = ((1.0 - c) * self._c1[0] + c * self._c2[0], | svert.attribute.alpha = color[3] | ||||
| (1.0 - c) * self._c1[1] + c * self._c2[1], | |||||
| (1.0 - c) * self._c1[2] + c * self._c2[2]) | |||||
| att.alpha = (1.0 - c) * self._c1[3] + c * self._c2[3] | |||||
| inc = inc + 1 | |||||
| it.increment() | |||||
| class pyInterpolateColorShader(StrokeShader): | class pyInterpolateColorShader(StrokeShader): | ||||
| Context not available. | |||||
| """ | """ | ||||
| def __init__(self,r1,g1,b1,a1, r2,g2,b2,a2): | def __init__(self,r1,g1,b1,a1, r2,g2,b2,a2): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._c1 = [r1,g1,b1,a1] | # use 4d vector to simplify math | ||||
| self._c2 = [r2,g2,b2,a2] | self._c1 = Vector((r1,g1,b1,a1)) | ||||
| self._c2 = Vector((r2,g2,b2,a2)) | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| n = stroke.stroke_vertices_size() - 1 | n = len(stroke) - 1 | ||||
| inc = 0 | for i, svert in enumerate(stroke): | ||||
| it = stroke.stroke_vertices_begin() | c = 1.0 - 2.0 * abs((float(i)/float(n)) - 0.5) | ||||
Not Done Inline Actionsc = 1.0 - 2.0 * abs(i / n - 0.5) kjym3: c = 1.0 - 2.0 * abs(i / n - 0.5) | |||||
| while not it.is_end: | color = (1.0 - c) * self._c1 + c * self._c2 | ||||
| att = it.object.attribute | svert.attribute.color = color[:3] | ||||
| u = float(inc) / float(n) | svert.attribute.alpha = color[3] | ||||
| c = 1.0 - 2.0 * abs(u - 0.5) | |||||
| att.color = ((1.0 - c) * self._c1[0] + c * self._c2[0], | class pyModulateAlphaShader(StrokeShader): | ||||
| (1.0 - c) * self._c1[1] + c * self._c2[1], | """ | ||||
| (1.0 - c) * self._c1[2] + c * self._c2[2]) | Limits the stroke's alpha between a min and max value. | ||||
| att.alpha = (1.0-c) * self._c1[3] + c * self._c2[3] | """ | ||||
| inc = inc+1 | def __init__(self, min=0, max=1): | ||||
| it.increment() | StrokeShader.__init__(self) | ||||
| self.__min = min | |||||
| self.__max = max | |||||
| def shade(self, stroke): | |||||
| for svert in stroke: | |||||
| alpha = svert.attribute.alpha | |||||
| alpha = alpha * svert.point.y * 0.0025 | |||||
| alpha = min(max(alpha, self.__min), self.__max) | |||||
| svert.attribute.alpha = alpha | |||||
| class pyMaterialColorShader(StrokeShader): | class pyMaterialColorShader(StrokeShader): | ||||
| Context not available. | |||||
| def __init__(self, threshold=50): | def __init__(self, threshold=50): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._threshold = threshold | self._threshold = threshold | ||||
| self._func = MaterialF0D() | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | |||||
| func = MaterialF0D() | |||||
| xn = 0.312713 | xn = 0.312713 | ||||
| yn = 0.329016 | yn = 0.329016 | ||||
| Yn = 1.0 | Yn = 1.0 | ||||
| un = 4.* xn / (-2.*xn + 12.*yn + 3.) | un = 4.* xn / (-2.*xn + 12.*yn + 3.) | ||||
| vn= 9.* yn / (-2.*xn + 12.*yn +3.) | vn= 9.* yn / (-2.*xn + 12.*yn +3.) | ||||
| while not it.is_end: | it_0d = Interface0DIterator(iter(stroke)) | ||||
| mat = func(Interface0DIterator(it)) | for svert, _ in zip(stroke, it_0d): | ||||
| mat = self._func(it_0d) | |||||
Not Done Inline Actionsit = Interface0DIterator(stroke) for svert in it: mat = self._func(it) kjym3: it = Interface0DIterator(stroke)
for svert in it:
mat = self._func(it)
| |||||
| r = mat.diffuse[0] | r, g, b, *rest = mat.diffuse | ||||
| g = mat.diffuse[1] | |||||
| b = mat.diffuse[2] | |||||
| X = 0.412453*r + 0.35758 *g + 0.180423*b | X = 0.412453 * r + 0.35758 * g + 0.180423 * b | ||||
| Y = 0.212671*r + 0.71516 *g + 0.072169*b | Y = 0.212671 * r + 0.71516 * g + 0.072169 * b | ||||
| Z = 0.019334*r + 0.119193*g + 0.950227*b | Z = 0.019334 * r + 0.11919 * g + 0.950227 * b | ||||
| if (X, Y, Z) == (0, 0, 0): | if (X, Y, Z) == (0, 0, 0): | ||||
| X = 0.01 | X = Y = Z = 0.1 | ||||
| Y = 0.01 | |||||
| Z = 0.01 | |||||
| u = 4.*X / (X + 15.*Y + 3.*Z) | u = 4.*X / (X + 15.*Y + 3.*Z) | ||||
| v = 9.*Y / (X + 15.*Y + 3.*Z) | v = 9.*Y / (X + 15.*Y + 3.*Z) | ||||
| Context not available. | |||||
| V = 13. * L * (v - vn) | V = 13. * L * (v - vn) | ||||
| if L > self._threshold: | if L > self._threshold: | ||||
| L = L/1.3 | L /= 1.3 | ||||
| U = U+10 | U += 10. | ||||
| else: | else: | ||||
| L = L +2.5*(100-L)/5. | L = L + 2.5 * (100-L) * 0.2 | ||||
| U = U/3.0 | U /= 3.0 | ||||
| V = V/3.0 | V /= 3.0 | ||||
| u = U / (13. * L) + un | |||||
| v = V / (13. * L) + vn | u = U / (13.0 * L) + un | ||||
| v = V / (13.0 * L) + vn | |||||
| Y = Yn * pow(((L+16.)/116.), 3.) | Y = Yn * pow(((L+16.)/116.), 3.) | ||||
| X = -9.0 * Y * u / ((u - 4.0) * v - u * v) | X = -9. * Y * u / ((u - 4.)* v - u * v) | ||||
| Z = (9.0 * Y - 15.0 * v * Y - v * X) / (3.0 * v) | Z = (9. * Y - 15*v*Y - v*X) /( 3. * v) | ||||
| r = 3.240479 * X - 1.53715 * Y - 0.498535 * Z | r = 3.240479 * X - 1.53715 * Y - 0.498535 * Z | ||||
| g = -0.969256 * X + 1.875991 * Y + 0.041556 * Z | g = -0.969256 * X + 1.875991 * Y + 0.041556 * Z | ||||
| b = 0.055648 * X - 0.204043 * Y + 1.057311 * Z | b = 0.055648 * X - 0.204043 * Y + 1.057311 * Z | ||||
| r = max(0,r) | r = max(0, r) | ||||
| g = max(0,g) | g = max(0, g) | ||||
| b = max(0,b) | b = max(0, b) | ||||
| it.object.attribute.color = (r, g, b) | svert.attribute.color = (r, g, b) | ||||
| it.increment() | |||||
| class pyRandomColorShader(StrokeShader): | class pyRandomColorShader(StrokeShader): | ||||
| Context not available. | |||||
| def __init__(self, s=1): | def __init__(self, s=1): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| random.seed(s) | random.seed(s) | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| ## pick a random color | ## pick a random color | ||||
| c0 = float(random.uniform(15,75))/100.0 | random_color = (random.uniform(15, 75) * 0.01, | ||||
| c1 = float(random.uniform(15,75))/100.0 | random.uniform(15, 75) * 0.01, | ||||
| c2 = float(random.uniform(15,75))/100.0 | random.uniform(15, 75) * 0.01) | ||||
| #print(c0, c1, c2) | |||||
| it = stroke.stroke_vertices_begin() | |||||
| while not it.is_end: | |||||
| it.object.attribute.color = (c0,c1,c2) | |||||
| it.increment() | |||||
| for svert in stroke: | |||||
| svert.attribute.color = random_color | |||||
| class py2DCurvatureColorShader(StrokeShader): | class py2DCurvatureColorShader(StrokeShader): | ||||
| """ | """ | ||||
| Context not available. | |||||
| A higher curvature will yield a brighter color | A higher curvature will yield a brighter color | ||||
| """ | """ | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | |||||
| func = Curvature2DAngleF0D() | func = Curvature2DAngleF0D() | ||||
| while not it.is_end: | it = iter(stroke) | ||||
| for svert in it: | |||||
| c = func(Interface0DIterator(it)) | c = func(Interface0DIterator(it)) | ||||
| if c < 0: | if c < 0 and bpy.app.debug_freestyle: | ||||
Not Done Inline Actionsit = Interface0DIterator(stroke) for svert in it: c = func(it) kjym3: it = Interface0DIterator(stroke)
for svert in it:
c = func(it)
| |||||
| print("negative 2D curvature") | print("py2DCurvatureColorShader: negative 2D curvature") | ||||
| color = 10.0 * c/3.1415 | color = 10.0 * c / pi | ||||
Not Done Inline ActionsThe constant is 0.01 in the original code. kjym3: The constant is 0.01 in the original code. | |||||
| it.object.attribute.color = (color, color, color) | svert.attribute.color = (color, color, color) | ||||
| it.increment() | |||||
| class pyTimeColorShader(StrokeShader): | class pyTimeColorShader(StrokeShader): | ||||
| Context not available. | |||||
| def __init__(self, step=0.01): | def __init__(self, step=0.01): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._step = step | self._step = step | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| for i, svert in enumerate(iter(stroke)): | for i, svert in enumerate(stroke): | ||||
| c = i * self._step | c = i * self._step | ||||
| svert.attribute.color = (c,c,c) | svert.attribute.color = (c,c,c) | ||||
| ## geometry modifiers | """-- Geometry Stroke Shaders --""" | ||||
| class pySamplingShader(StrokeShader): | class pySamplingShader(StrokeShader): | ||||
| """ | """ | ||||
| Context not available. | |||||
| def __init__(self, sampling): | def __init__(self, sampling): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._sampling = sampling | self._sampling = sampling | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| stroke.resample(float(self._sampling)) | stroke.resample(float(self._sampling)) | ||||
| stroke.update_length() | stroke.update_length() | ||||
| class pyBackboneStretcherShader(StrokeShader): | class pyBackboneStretcherShader(StrokeShader): | ||||
| """ | """ | ||||
| Stretches the stroke's backbone by a given length (in pixels) | Stretches the stroke's backbone by a given length (in pixels) | ||||
| Context not available. | |||||
| def __init__(self, l): | def __init__(self, l): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._l = l | self._l = l | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it0 = stroke.stroke_vertices_begin() | # get first two vertices | ||||
| it1 = StrokeVertexIterator(it0) | it = iter(stroke) | ||||
| it1.increment() | first = (next(it), next(it)) | ||||
| itn = stroke.stroke_vertices_end() | # get final two vertices | ||||
| itn.decrement() | it = reversed(stroke) | ||||
| itn_1 = StrokeVertexIterator(itn) | final = (next(it), next(it)) | ||||
| itn_1.decrement() | # calculate new points and assign them | ||||
| v0 = it0.object | d1 = (first[0].point_2d - first[1].point_2d).normalized() | ||||
| v1 = it1.object | dn = (final[0].point_2d - final[1].point_2d).normalized() | ||||
| vn_1 = itn_1.object | first[0].point += d1 * self._l | ||||
| vn = itn.object | final[0].point += dn * self._l | ||||
| p0 = v0.point_2d | |||||
| pn = vn.point_2d | |||||
| p1 = v1.point_2d | |||||
| pn_1 = vn_1.point_2d | |||||
| d1 = (p0 - p1).normalized() | |||||
| dn = (pn - pn_1).normalized() | |||||
| newFirst = p0+d1*float(self._l) | |||||
| newLast = pn+dn*float(self._l) | |||||
| v0.point = newFirst | |||||
| vn.point = newLast | |||||
| stroke.update_length() | stroke.update_length() | ||||
| class pyLengthDependingBackboneStretcherShader(StrokeShader): | class pyLengthDependingBackboneStretcherShader(StrokeShader): | ||||
| """ | """ | ||||
| Stretches the stroke's backbone proportional to the stroke's length | Stretches the stroke's backbone proportional to the stroke's length | ||||
| NOTE: you'll probably want an l somewhere between (0.5 - 0). A value that | |||||
| is too high may yield unexpected results. | |||||
| """ | """ | ||||
| def __init__(self, l): | def __init__(self, l): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._l = l | self._l = l | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| l = stroke.length_2d | stretch = self._l * stroke.length_2d | ||||
| stretch = self._l*l | # get first two vertices | ||||
| it0 = stroke.stroke_vertices_begin() | it = iter(stroke) | ||||
| it1 = StrokeVertexIterator(it0) | first = (next(it), next(it)) | ||||
| it1.increment() | # get final two vertices | ||||
| itn = stroke.stroke_vertices_end() | it = reversed(stroke) | ||||
| itn.decrement() | final = (next(it), next(it)) | ||||
| itn_1 = StrokeVertexIterator(itn) | # calculate new points and assign them | ||||
| itn_1.decrement() | d1 = (first[0].point_2d - first[1].point_2d).normalized() | ||||
| v0 = it0.object | dn = (final[0].point_2d - final[1].point_2d).normalized() | ||||
| v1 = it1.object | first[0].point += d1 * stretch | ||||
| vn_1 = itn_1.object | final[0].point += dn * stretch | ||||
| vn = itn.object | |||||
| p0 = v0.point_2d | |||||
| pn = vn.point_2d | |||||
| p1 = v1.point_2d | |||||
| pn_1 = vn_1.point_2d | |||||
| d1 = (p0 - p1).normalized() | |||||
| dn = (pn - pn_1).normalized() | |||||
| newFirst = p0+d1*float(stretch) | |||||
| newLast = pn+dn*float(stretch) | |||||
| v0.point = newFirst | |||||
| vn.point = newLast | |||||
| stroke.update_length() | stroke.update_length() | ||||
| ## reversed attribute would be handy to set here | |||||
| class pyGuidingLineShader(StrokeShader): | class pyGuidingLineShader(StrokeShader): | ||||
| """ | """ | ||||
| Replaces the stroke by its corresponding tangent | Replaces the stroke by its corresponding tangent | ||||
| """ | """ | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() ## get the first vertex | ## get the tangent direction | ||||
| itlast = stroke.stroke_vertices_end() ## | t = stroke[-1].point - stroke[0].point | ||||
| itlast.decrement() ## get the last one | ## look for the stroke middle vertex | ||||
| t = itlast.object.point - it.object.point ## tangent direction | itmiddle = iter(stroke) | ||||
| itmiddle = StrokeVertexIterator(it) ## | while itmiddle.object.u < 0.5: | ||||
| while itmiddle.object.u < 0.5: ## look for the stroke middle vertex | itmiddle.increment() | ||||
| itmiddle.increment() ## | center_vertex = itmiddle.object | ||||
| ## position all the vertices along the tangent for the right part | |||||
| it = StrokeVertexIterator(itmiddle) | it = StrokeVertexIterator(itmiddle) | ||||
| it.increment() | for svert in it: | ||||
| while not it.is_end: ## position all the vertices along the tangent for the right part | svert.point = center_vertex.point + t * (svert.u - center_vertex.u) | ||||
| it.object.point = itmiddle.object.point+t*(it.object.u-itmiddle.object.u) | |||||
| it.increment() | ## position all the vertices along the tangent for the left part | ||||
| it = StrokeVertexIterator(itmiddle) | it = StrokeVertexIterator(itmiddle) | ||||
| it.decrement() | for svert in it.reversed(): | ||||
| while not it.is_begin: ## position all the vertices along the tangent for the left part | svert.point = center_vertex.point - t * (center_vertex.u - svert.u) | ||||
| it.object.point = itmiddle.object.point-t*(itmiddle.object.u-it.object.u) | |||||
| it.decrement() | |||||
| it.object.point = itmiddle.object.point-t*itmiddle.object.u ## first vertex | |||||
| stroke.update_length() | |||||
| stroke.update_length() | |||||
| class pyBackboneStretcherNoCuspShader(StrokeShader): | class pyBackboneStretcherNoCuspShader(StrokeShader): | ||||
Not Done Inline ActionsHow about using the notation stroke[0], stroke[-1] and so on here? That makes the creation of iterators unnecessary. kjym3: How about using the notation `stroke[0]`, `stroke[-1]` and so on here? That makes the creation… | |||||
| """ | """ | ||||
| Context not available. | |||||
| self._l = l | self._l = l | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it0 = stroke.stroke_vertices_begin() | # get first two vertices | ||||
| it1 = StrokeVertexIterator(it0) | it = iter(stroke) | ||||
| it1.increment() | v0, v1 = (next(it), next(it)) | ||||
| itn = stroke.stroke_vertices_end() | # get final two vertices | ||||
| itn.decrement() | it = reversed(stroke) | ||||
| itn_1 = StrokeVertexIterator(itn) | vn, vn_1 = (next(it), next(it)) | ||||
| itn_1.decrement() | |||||
| v0 = it0.object | if not (v0.nature & Nature.CUSP) and not (v1.nature & Nature.CUSP): | ||||
| v1 = it1.object | |||||
| if (v0.nature & Nature.CUSP) == 0 and (v1.nature & Nature.CUSP) == 0: | |||||
| d1 = (v0.point - v1.point).normalized() | d1 = (v0.point - v1.point).normalized() | ||||
| newFirst = v0.point+d1*float(self._l) | v0.point += d1 * self._l | ||||
| v0.point = newFirst | |||||
| vn_1 = itn_1.object | if not (vn.nature & Nature.CUSP) and not (vn_1.nature & Nature.CUSP): | ||||
| vn = itn.object | |||||
| if (vn.nature & Nature.CUSP) == 0 and (vn_1.nature & Nature.CUSP) == 0: | |||||
| dn = (vn.point - vn_1.point).normalized() | dn = (vn.point - vn_1.point).normalized() | ||||
| newLast = vn.point + dn * float(self._l) | vn.point += dn * self._l | ||||
| vn.point = newLast | |||||
| stroke.update_length() | stroke.update_length() | ||||
| Context not available. | |||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._lambda = lambda1 | self._lambda = lambda1 | ||||
| self._nbIter = nbIter | self._nbIter = nbIter | ||||
| self._normalInfo = Normal2DF0D() | self._normal = Normal2DF0D() | ||||
| self._curvatureInfo = Curvature2DAngleF0D() | self._angle = Curvature2DAngleF0D() | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| for i in range (1, self._nbIter): | it = Interface0DIterator(stroke) | ||||
| it = stroke.stroke_vertices_begin() | it2 = StrokeVertexIterator(stroke) | ||||
| while not it.is_end: | for i, _ in zip(range(1, self._nbIter), it): | ||||
| v = it.object | b = it2.object.id | ||||
| p1 = v.point | points = (self._normal(it) * self._lambda * self._angle(it) for o in it) | ||||
| p2 = self._normalInfo(Interface0DIterator(it))*self._lambda*self._curvatureInfo(Interface0DIterator(it)) | for svert, point in zip(stroke, points): | ||||
| v.point = p1+p2 | svert.point += point | ||||
Not Done Inline ActionsThese nested for loops are difficult to read... Why not iterate self._nbIter-1 times and creating a new Interface0DIterator instance inside the loop? for i in range(1, self._nbIter):
it = Interface0DIterator(stroke)
for svert in it: # this works in Python
svert.point += self._normal(it) * self._lambda * self._angle(it)kjym3: These nested for loops are difficult to read... Why not iterate self._nbIter-1 times and… | |||||
Not Done Inline ActionsPlease, consider revising these nested for loops as suggested in https://developer.blender.org/D319?id=1881#inline-1886. kjym3: Please, consider revising these nested for loops as suggested in https://developer.blender. | |||||
| it.increment() | |||||
| stroke.update_length() | stroke.update_length() | ||||
| Context not available. | |||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self._l = l | self._l = l | ||||
| def check_vertex(self, v): | |||||
| return v.curvilinear_abscissa < self._l or v.stroke_length - v.curvilinear_abscissa < self._l | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| originalSize = stroke.stroke_vertices_size() | n = len(stroke) | ||||
| if originalSize < 4: | if n < 4: | ||||
| return | return None | ||||
| verticesToRemove = [] | |||||
| oldAttributes = [] | verticesToRemove = tuple(svert for svert in stroke if self.check_vertex(svert)) | ||||
| it = stroke.stroke_vertices_begin() | oldAttributes = tuple(StrokeAttribute(svert.attribute) for svert in stroke) | ||||
| while not it.is_end: | |||||
| v = it.object | if n - len(verticesToRemove) < 2: | ||||
| if v.curvilinear_abscissa < self._l or v.stroke_length-v.curvilinear_abscissa < self._l: | return None | ||||
| verticesToRemove.append(v) | |||||
| oldAttributes.append(StrokeAttribute(v.attribute)) | |||||
| it.increment() | |||||
| if originalSize-len(verticesToRemove) < 2: | |||||
| return | |||||
| for sv in verticesToRemove: | for sv in verticesToRemove: | ||||
| stroke.remove_vertex(sv) | stroke.remove_vertex(sv) | ||||
| stroke.update_length() | stroke.update_length() | ||||
| stroke.resample(originalSize) | stroke.resample(n) | ||||
| if stroke.stroke_vertices_size() != originalSize: | if len(stroke) != n and bpy.app.debug_freestyle: | ||||
| print("pyTipRemover: Warning: resampling problem") | print("pyTipRemover: Warning: resampling problem") | ||||
| it = stroke.stroke_vertices_begin() | |||||
| for a in oldAttributes: | for svert, a in zip(stroke, oldAttributes): | ||||
| if it.is_end: | svert.attribute = a | ||||
| break | |||||
| it.object.attribute = a | |||||
| it.increment() | |||||
| stroke.update_length() | stroke.update_length() | ||||
| Context not available. | |||||
| Removes t-vertices from the stroke | Removes t-vertices from the stroke | ||||
| """ | """ | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| if stroke.stroke_vertices_size() <= 3: | if len(stroke) < 4: | ||||
| return | return None | ||||
| predTVertex = pyVertexNatureUP0D(Nature.T_VERTEX) | |||||
| it = stroke.stroke_vertices_begin() | v0 = next(stroke.stroke_vertices_begin()) | ||||
| itlast = stroke.stroke_vertices_end() | vn = next(stroke.stroke_vertices_end()) | ||||
| itlast.decrement() | if (v0.nature & Nature.T_VERTEX): | ||||
| if predTVertex(it): | stroke.remove_vertex(v0) | ||||
| stroke.remove_vertex(it.object) | if (vn.nature & Nature.T_VERTEX): | ||||
| if predTVertex(itlast): | stroke.remove_vertex(vn) | ||||
| stroke.remove_vertex(itlast.object) | |||||
| stroke.update_length() | stroke.update_length() | ||||
| #class pyExtremitiesOrientationShader(StrokeShader): | |||||
| # def __init__(self, x1,y1,x2=0,y2=0): | |||||
| # StrokeShader.__init__(self) | |||||
| # self._v1 = Vector((x1,y1)) | |||||
| # self._v2 = Vector((x2,y2)) | |||||
| # def shade(self, stroke): | |||||
| # #print(self._v1.x,self._v1.y) | |||||
| # stroke.setBeginningOrientation(self._v1.x,self._v1.y) | |||||
| # stroke.setEndingOrientation(self._v2.x,self._v2.y) | |||||
| class pyHLRShader(StrokeShader): | class pyHLRShader(StrokeShader): | ||||
| """ | """ | ||||
| Controlls visibility based upon the quantative invisibility (QI) | Controlls visibility based upon the quantative invisibility (QI) | ||||
| based on hidden line removal (HLR) | based on hidden line removal (HLR) | ||||
| """ | """ | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| originalSize = stroke.stroke_vertices_size() | if len(stroke) < 4: | ||||
Not Done Inline ActionsUsing Iterface1D.vertices_begin() may further simplify the updated code (not tested): it = stroke.vertices_begin()
for svert in it:
svert.point += self._normal(it) * self._lambda * self._angle(it)It is noted that the .object property of Interface0DIterator gives a StrokeVertex instance if the iterator is created from Stroke. Python does not have a dynamic_cast operator available in C++, so casting to StrokeVertex is automatically done when the .object property value is returned. kjym3: Using Iterface1D.vertices_begin() may further simplify the updated code (not tested):
it =… | |||||
| if originalSize < 4: | return None | ||||
| return | it = iter(stroke) | ||||
| it = stroke.stroke_vertices_begin() | it2 = iter(stroke) | ||||
| invisible = 0 | it2.increment | ||||
Not Done Inline Actionsit2 = it.incremented() kjym3: it2 = it.incremented() | |||||
| it2 = StrokeVertexIterator(it) | |||||
| it2.increment() | for v1, v2 in zip(it, it2): | ||||
| fe = self.get_fedge(it.object, it2.object) | if (v1.nature & Nature.VIEW_VERTEX): | ||||
| if fe.viewedge.qi != 0: | visible = v1.get_fedge(v2).viewedge.qi | ||||
| invisible = 1 | |||||
| while not it2.is_end: | |||||
| v = it.object | |||||
| vnext = it2.object | |||||
| if (v.nature & Nature.VIEW_VERTEX) != 0: | |||||
| #if (v.nature & Nature.T_VERTEX) != 0: | |||||
| fe = self.get_fedge(v, vnext) | |||||
| qi = fe.viewedge.qi | |||||
| if qi != 0: | |||||
| invisible = 1 | |||||
| else: | |||||
| invisible = 0 | |||||
| if invisible: | |||||
| v.attribute.visible = False | |||||
| it.increment() | |||||
| it2.increment() | |||||
| def get_fedge(self, it1, it2): | |||||
| return it1.get_fedge(it2) | |||||
| # broken and a mess | |||||
| class pyTVertexOrientationShader(StrokeShader): | |||||
| def __init__(self): | |||||
| StrokeShader.__init__(self) | |||||
| self._Get2dDirection = Orientation2DF1D() | |||||
| ## finds the TVertex orientation from the TVertex and | |||||
| ## the previous or next edge | |||||
| def findOrientation(self, tv, ve): | |||||
| mateVE = tv.get_mate(ve) | |||||
| if ve.qi != 0 or mateVE.qi != 0: | |||||
| ait = AdjacencyIterator(tv,1,0) | |||||
| winner = None | |||||
| incoming = True | |||||
| while not ait.is_end: | |||||
| ave = ait.object | |||||
| if ave.id != ve.id and ave.id != mateVE.id: | |||||
| winner = ait.object | |||||
| if not ait.isIncoming(): # FIXME | |||||
| incoming = False | |||||
| break | |||||
| ait.increment() | |||||
| if winner is not None: | |||||
| if not incoming: | |||||
| direction = self._Get2dDirection(winner.last_fedge) | |||||
| else: | |||||
| direction = self._Get2dDirection(winner.first_fedge) | |||||
| return direction | |||||
| return None | |||||
| def castToTVertex(self, cp): | |||||
| if cp.t2d() == 0.0: | |||||
| return cp.first_svertex.viewvertex | |||||
| elif cp.t2d() == 1.0: | |||||
| return cp.second_svertex.viewvertex | |||||
| return None | |||||
| def shade(self, stroke): | v1.attribute.visible = not visible | ||||
| it = stroke.stroke_vertices_begin() | |||||
| it2 = StrokeVertexIterator(it) | |||||
| it2.increment() | |||||
| ## case where the first vertex is a TVertex | |||||
| v = it.object | |||||
| if (v.nature & Nature.T_VERTEX) != 0: | |||||
| tv = self.castToTVertex(v) | |||||
| if tv is not None: | |||||
| ve = self.get_fedge(v, it2.object).viewedge | |||||
| dir = self.findOrientation(tv, ve) | |||||
| if dir is not None: | |||||
| #print(dir.x, dir.y) | |||||
| v.attribute.set_attribute_vec2("orientation", dir) | |||||
| while not it2.is_end: | |||||
| vprevious = it.object | |||||
| v = it2.object | |||||
| if (v.nature & Nature.T_VERTEX) != 0: | |||||
| tv = self.castToTVertex(v) | |||||
| if tv is not None: | |||||
| ve = self.get_fedge(vprevious, v).viewedge | |||||
| dir = self.findOrientation(tv, ve) | |||||
| if dir is not None: | |||||
| #print(dir.x, dir.y) | |||||
| v.attribute.set_attribute_vec2("orientation", dir) | |||||
| it.increment() | |||||
| it2.increment() | |||||
| ## case where the last vertex is a TVertex | |||||
| v = it.object | |||||
| if (v.nature & Nature.T_VERTEX) != 0: | |||||
| itPrevious = StrokeVertexIterator(it) | |||||
| itPrevious.decrement() | |||||
| tv = self.castToTVertex(v) | |||||
| if tv is not None: | |||||
| ve = self.get_fedge(itPrevious.object, v).viewedge | |||||
| dir = self.findOrientation(tv, ve) | |||||
| if dir is not None: | |||||
| #print(dir.x, dir.y) | |||||
| v.attribute.set_attribute_vec2("orientation", dir) | |||||
| def get_fedge(self, it1, it2): | |||||
| return it1.get_fedge(it2) | |||||
| class pySinusDisplacementShader(StrokeShader): | class pySinusDisplacementShader(StrokeShader): | ||||
Not Done Inline ActionsNo return value is required. kjym3: No return value is required. | |||||
Not Done Inline Actionsin python, explicit is said to be better than implicit. flokkievids: in python, explicit is said to be better than implicit.
I like this better than the empty… | |||||
Not Done Inline ActionsI agree that explicit is better than implicit in Python. In this case, however, the shade() method is not supposed to return a value. Explicitly returning None appears weird to me. kjym3: I agree that explicit is better than implicit in Python. In this case, however, the `shade()`… | |||||
| Context not available. | |||||
| self._getNormal = Normal2DF0D() | self._getNormal = Normal2DF0D() | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | it = iter(stroke) | ||||
| while not it.is_end: | for svert in it: | ||||
| v = it.object | normal = self._getNormal(Interface0DIterator(it)) | ||||
Not Done Inline Actionsit = Interface0DIterator(stroke)
for svert in it:
normal = self._getNormal(it)kjym3: it = Interface0DIterator(stroke)
for svert in it:
normal = self._getNormal(it)
| |||||
| #print(self._getNormal.name) | a = self._a * (1 - 2 * (abs(svert.u - 0.5))) | ||||
| n = self._getNormal(Interface0DIterator(it)) | n = normal * a * cos(self._f * svert.u * 6.28) | ||||
| p = v.point | svert.point += n | ||||
| u = v.u | |||||
| a = self._a*(1-2*(abs(u-0.5))) | |||||
| n = n*a*cos(self._f*u*6.28) | |||||
| #print(n.x, n.y) | |||||
| v.point = p+n | |||||
| #v.point = v.point+n*a*cos(f*v.u) | |||||
| it.increment() | |||||
| stroke.update_length() | stroke.update_length() | ||||
| class pyPerlinNoise1DShader(StrokeShader): | class pyPerlinNoise1DShader(StrokeShader): | ||||
| """ | """ | ||||
| Displaces the stroke using the curvilinear abscissa. This means | Displaces the stroke using the curvilinear abscissa. This means | ||||
| Context not available. | |||||
| self.__oct = oct | self.__oct = oct | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | for svert in stroke: | ||||
| while not it.is_end: | i = svert.projected_x + svert.projected_y | ||||
| v = it.object | |||||
| i = v.projected_x + v.projected_y | |||||
| nres = self.__noise.turbulence1(i, self.__freq, self.__amp, self.__oct) | nres = self.__noise.turbulence1(i, self.__freq, self.__amp, self.__oct) | ||||
Not Done Inline ActionsUsing stroke[0] and stroke[-1] would be easier. kjym3: Using `stroke[0]` and `stroke[-1]` would be easier. | |||||
| v.point = (v.projected_x + nres, v.projected_y + nres) | svert.point = (svert.projected_x + nres, svert.projected_y + nres) | ||||
| it.increment() | |||||
| stroke.update_length() | stroke.update_length() | ||||
| Context not available. | |||||
| self.__oct = oct | self.__oct = oct | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | for svert in stroke: | ||||
| while not it.is_end: | nres = self.__noise.turbulence2(svert.point_2d, self.__freq, self.__amp, self.__oct) | ||||
Not Done Inline ActionsAn explicit int comparison qi == 0 is preferable to me. kjym3: An explicit int comparison `qi == 0` is preferable to me. | |||||
| v = it.object | svert.point = (svert.projected_x + nres, svert.projected_y + nres) | ||||
| nres = self.__noise.turbulence2(v.point_2d, self.__freq, self.__amp, self.__oct) | |||||
| v.point = (v.projected_x + nres, v.projected_y + nres) | |||||
| it.increment() | |||||
| stroke.update_length() | stroke.update_length() | ||||
| Context not available. | |||||
| self.__random_radius = random_radius | self.__random_radius = random_radius | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | p_min, p_max = stroke[0].point.copy(), stroke[0].point.copy() | ||||
| if it.is_end: | for svert in stroke: | ||||
| return | p = svert.point | ||||
| p_min = it.object.point.copy() | p_min.x = min(p_min.x, p.x) | ||||
| p_max = it.object.point.copy() | p_max.x = max(p_max.x, p.x) | ||||
| while not it.is_end: | p_min.y = min(p_min.y, p.y) | ||||
| p = it.object.point | p_max.y = max(p_max.y, p.y) | ||||
| if p.x < p_min.x: | |||||
| p_min.x = p.x | |||||
| if p.x > p_max.x: | |||||
| p_max.x = p.x | |||||
| if p.y < p_min.y: | |||||
| p_min.y = p.y | |||||
| if p.y > p_max.y: | |||||
| p_max.y = p.y | |||||
| it.increment() | |||||
| stroke.resample(32 * self.__turns) | stroke.resample(32 * self.__turns) | ||||
| sv_nb = stroke.stroke_vertices_size() | sv_nb = len(stroke) // self.__turns | ||||
| # print("min :", p_min.x, p_min.y) # DEBUG | |||||
| # print("mean :", p_sum.x, p_sum.y) # DEBUG | |||||
| # print("max :", p_max.x, p_max.y) # DEBUG | |||||
| # print("----------------------") # DEBUG | |||||
| ####################################################### | |||||
| sv_nb = sv_nb // self.__turns | |||||
| center = (p_min + p_max) / 2 | center = (p_min + p_max) / 2 | ||||
| radius = (center.x - p_min.x + center.y - p_min.y) / 2 | radius = (center.x - p_min.x + center.y - p_min.y) / 2 | ||||
| p_new = Vector((0.0, 0.0)) | p_new = Vector((0.0, 0.0)) | ||||
| ####################################################### | |||||
| R = self.__random_radius | R = self.__random_radius | ||||
| C = self.__random_center | C = self.__random_center | ||||
| i = 0 | |||||
| it = stroke.stroke_vertices_begin() | it = iter(stroke) | ||||
| for j in range(self.__turns): | for j in range(self.__turns): | ||||
| prev_radius = radius | prev_radius = radius | ||||
| prev_center = center | prev_center = center | ||||
| radius = radius + randint(-R, R) | radius = radius + randint(-R, R) | ||||
| center = center + Vector((randint(-C, C), randint(-C, C))) | center = center + Vector((randint(-C, C), randint(-C, C))) | ||||
| while i < sv_nb and not it.is_end: | for i, svert in zip(range(sv_nb), it): | ||||
| t = float(i) / float(sv_nb - 1) | t = float(i) / float(sv_nb - 1) | ||||
| r = prev_radius + (radius - prev_radius) * t | r = prev_radius + (radius - prev_radius) * t | ||||
| c = prev_center + (center - prev_center) * t | c = prev_center + (center - prev_center) * t | ||||
| p_new.x = c.x + r * cos(2 * pi * t) | p_new.x = c.x + r * cos(2 * pi * t) | ||||
| p_new.y = c.y + r * sin(2 * pi * t) | p_new.y = c.y + r * sin(2 * pi * t) | ||||
| it.object.point = p_new | svert.point = p_new | ||||
| i = i + 1 | |||||
| it.increment() | # remove exessive vertices | ||||
| i = 1 | if not it.is_end: | ||||
| verticesToRemove = [] | |||||
| while not it.is_end: | |||||
| verticesToRemove.append(it.object) | |||||
| it.increment() | it.increment() | ||||
| for sv in verticesToRemove: | verticesToRemove = (svert for svert in it) | ||||
| stroke.remove_vertex(sv) | for sv in verticesToRemove: | ||||
| stroke.remove_vertex(sv) | |||||
Not Done Inline ActionsI believe verticesToRemove = tuple(svert for svert in it) is safer (see https://developer.blender.org/D319?id=1502#inline-1884 ). kjym3: I believe `verticesToRemove = tuple(svert for svert in it)` is safer (see https://developer. | |||||
Not Done Inline ActionsI hadn't noticed yet. a change of test scene revealed this problem. fixed it now flokkievids: I hadn't noticed yet. a change of test scene revealed this problem. fixed it now | |||||
| stroke.update_length() | stroke.update_length() | ||||
| class pyBluePrintEllipsesShader(StrokeShader): | class pyBluePrintEllipsesShader(StrokeShader): | ||||
| """ | |||||
| Draws the silhouette of the object as an ellips | |||||
| """ | |||||
| def __init__(self, turns=1, random_radius=3, random_center=5): | def __init__(self, turns=1, random_radius=3, random_center=5): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self.__turns = turns | self.__turns = turns | ||||
| Context not available. | |||||
| self.__random_radius = random_radius | self.__random_radius = random_radius | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | p_min, p_max = stroke[0].point.copy(), stroke[0].point.copy() | ||||
| if it.is_end: | for svert in stroke: | ||||
| return | p = svert.point | ||||
| p_min = it.object.point.copy() | p_min.x = min(p_min.x, p.x) | ||||
| p_max = it.object.point.copy() | p_max.x = max(p_max.x, p.x) | ||||
| while not it.is_end: | p_min.y = min(p_min.y, p.y) | ||||
| p = it.object.point | p_max.y = max(p_max.y, p.y) | ||||
| if p.x < p_min.x: | |||||
| p_min.x = p.x | |||||
| if p.x > p_max.x: | |||||
| p_max.x = p.x | |||||
| if p.y < p_min.y: | |||||
| p_min.y = p.y | |||||
| if p.y > p_max.y: | |||||
| p_max.y = p.y | |||||
| it.increment() | |||||
| stroke.resample(32 * self.__turns) | stroke.resample(32 * self.__turns) | ||||
| sv_nb = stroke.stroke_vertices_size() | sv_nb = len(stroke) // self.__turns | ||||
| sv_nb = sv_nb // self.__turns | |||||
| center = (p_min + p_max) / 2 | center = (p_min + p_max) / 2 | ||||
| radius = center - p_min | radius = center - p_min | ||||
| p_new = Vector((0.0, 0.0)) | p_new = Vector((0.0, 0.0)) | ||||
| ####################################################### | |||||
| R = self.__random_radius | R = self.__random_radius | ||||
| C = self.__random_center | C = self.__random_center | ||||
| i = 0 | |||||
| it = stroke.stroke_vertices_begin() | it = iter(stroke) | ||||
| for j in range(self.__turns): | for j in range(self.__turns): | ||||
| prev_radius = radius | prev_radius = radius | ||||
| prev_center = center | prev_center = center | ||||
| radius = radius + Vector((randint(-R, R), randint(-R, R))) | radius = radius + Vector((randint(-R, R), randint(-R, R))) | ||||
| center = center + Vector((randint(-C, C), randint(-C, C))) | center = center + Vector((randint(-C, C), randint(-C, C))) | ||||
| while i < sv_nb and not it.is_end: | for i, svert in zip(range(sv_nb), it): | ||||
| t = float(i) / float(sv_nb - 1) | t = float(i) / float(sv_nb - 1) | ||||
| r = prev_radius + (radius - prev_radius) * t | r = prev_radius + (radius - prev_radius) * t | ||||
| c = prev_center + (center - prev_center) * t | c = prev_center + (center - prev_center) * t | ||||
| p_new.x = c.x + r.x * cos(2 * pi * t) | p_new.x = c.x + r.x * cos(2 * pi * t) | ||||
| p_new.y = c.y + r.y * sin(2 * pi * t) | p_new.y = c.y + r.y * sin(2 * pi * t) | ||||
| it.object.point = p_new | svert.point = Vector((c.x + r.x * cos(2 * pi * t), | ||||
| i = i + 1 | c.y + r.y * sin(2 * pi * t))) | ||||
| it.increment() | |||||
| i = 1 | # remove exessive vertices | ||||
| verticesToRemove = [] | if not it.is_end: | ||||
| while not it.is_end: | |||||
| verticesToRemove.append(it.object) | |||||
| it.increment() | it.increment() | ||||
| for sv in verticesToRemove: | verticesToRemove = (svert for svert in it) | ||||
| stroke.remove_vertex(sv) | for sv in verticesToRemove: | ||||
| stroke.remove_vertex(sv) | |||||
| stroke.update_length() | stroke.update_length() | ||||
| class pyBluePrintSquaresShader(StrokeShader): | class pyBluePrintSquaresShader(StrokeShader): | ||||
| """ | |||||
| Draws the silhouette of the object as a square | |||||
| """ | |||||
| def __init__(self, turns=1, bb_len=10, bb_rand=0): | def __init__(self, turns=1, bb_len=10, bb_rand=0): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self.__turns = turns | self.__turns = turns # does not have any effect atm | ||||
| self.__bb_len = bb_len | self.__bb_len = bb_len | ||||
| self.__bb_rand = bb_rand | self.__bb_rand = bb_rand | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | # this condition will lead to errors later, end now | ||||
| if it.is_end: | if len(stroke) == 1: | ||||
| return | return None | ||||
| p_min = it.object.point.copy() | # copy the point; it may change in this proces | ||||
| p_max = it.object.point.copy() | p_min, p_max = stroke[0].point.copy(), stroke[0].point.copy() | ||||
| while not it.is_end: | # aquire the minimum and maximum values of the points | ||||
| p = it.object.point | for svert in stroke: | ||||
| if p.x < p_min.x: | p = svert.point | ||||
| p_min.x = p.x | p_min.x = min(p_min.x, p.x) | ||||
| if p.x > p_max.x: | p_max.x = max(p_max.x, p.x) | ||||
| p_max.x = p.x | p_min.y = min(p_min.y, p.y) | ||||
| if p.y < p_min.y: | p_max.y = max(p_max.y, p.y) | ||||
| p_min.y = p.y | |||||
| if p.y > p_max.y: | |||||
| p_max.y = p.y | |||||
| it.increment() | |||||
| stroke.resample(32 * self.__turns) | stroke.resample(32 * self.__turns) | ||||
| sv_nb = stroke.stroke_vertices_size() | num_segments = len(stroke) // self.__turns | ||||
| ####################################################### | f = num_segments // 4 | ||||
| sv_nb = sv_nb // self.__turns | # indices of the vertices that will form corners | ||||
| first = sv_nb // 4 | first, second, third, fourth = (f, f * 2, f * 3, num_segments) | ||||
| second = 2 * first | |||||
| third = 3 * first | # construct points of the backbone | ||||
| fourth = sv_nb | bb_len = self.__bb_len | ||||
| p_first = Vector((p_min.x - self.__bb_len, p_min.y)) | points = ( | ||||
| p_first_end = Vector((p_max.x + self.__bb_len, p_min.y)) | Vector((p_min.x - bb_len, p_min.y)), | ||||
| p_second = Vector((p_max.x, p_min.y - self.__bb_len)) | Vector((p_max.x + bb_len, p_min.y)), | ||||
| p_second_end = Vector((p_max.x, p_max.y + self.__bb_len)) | Vector((p_max.x, p_min.y - bb_len)), | ||||
| p_third = Vector((p_max.x + self.__bb_len, p_max.y)) | Vector((p_max.x, p_max.y + bb_len)), | ||||
| p_third_end = Vector((p_min.x - self.__bb_len, p_max.y)) | Vector((p_max.x + bb_len, p_max.y)), | ||||
| p_fourth = Vector((p_min.x, p_max.y + self.__bb_len)) | Vector((p_min.x - bb_len, p_max.y)), | ||||
| p_fourth_end = Vector((p_min.x, p_min.y - self.__bb_len)) | Vector((p_min.x, p_max.y + bb_len)), | ||||
| ####################################################### | Vector((p_min.x, p_min.y - bb_len)), | ||||
| R = self.__bb_rand | ) | ||||
| r = self.__bb_rand // 2 | |||||
| it = stroke.stroke_vertices_begin() | # add randomization to the points (if needed) | ||||
| visible = True | if self.__bb_rand: | ||||
| for j in range(self.__turns): | R, r = self.__bb_rand, self.__bb_rand // 2 | ||||
| p_first = p_first + Vector((randint(-R, R), randint(-r, r))) | |||||
| p_first_end = p_first_end + Vector((randint(-R, R), randint(-r, r))) | randomization_mat = ( | ||||
| p_second = p_second + Vector((randint(-r, r), randint(-R, R))) | Vector((randint(-R, R), randint(-r, r))), | ||||
| p_second_end = p_second_end + Vector((randint(-r, r), randint(-R, R))) | Vector((randint(-R, R), randint(-r, r))), | ||||
| p_third = p_third + Vector((randint(-R, R), randint(-r, r))) | Vector((randint(-r, r), randint(-R, R))), | ||||
| p_third_end = p_third_end + Vector((randint(-R, R), randint(-r, r))) | Vector((randint(-r, r), randint(-R, R))), | ||||
| p_fourth = p_fourth + Vector((randint(-r, r), randint(-R, R))) | Vector((randint(-R, R), randint(-r, r))), | ||||
| p_fourth_end = p_fourth_end + Vector((randint(-r, r), randint(-R, R))) | Vector((randint(-R, R), randint(-r, r))), | ||||
| vec_first = p_first_end - p_first | Vector((randint(-r, r), randint(-R, R))), | ||||
| vec_second = p_second_end - p_second | Vector((randint(-r, r), randint(-R, R))), | ||||
| vec_third = p_third_end - p_third | ) | ||||
| vec_fourth = p_fourth_end - p_fourth | |||||
| i = 0 | # combine both tuples | ||||
Not Done Inline ActionsA shorter form tuple(it) could be used here instead of (svert for svert in it), but maybe the former appears too cryptic. I prefer the latter. kjym3: A shorter form `tuple(it)` could be used here instead of `(svert for svert in it)`, but maybe… | |||||
Not Done Inline Actionsactually, verticesToRemove is a generator object here. altough the effect in this case may be minimal, I think it is a good practice to use generators when possible. flokkievids: actually, verticesToRemove is a generator object here. altough the effect in this case may be… | |||||
Not Done Inline ActionsI agree with you that using generators when possible is a good practice. Another concern I got is whether using a generator here is safe or not. The generator (svert for svert in it) keeps a pointer to a StrokeVertex in a sequence of objects, and does not keep all references of the StrokeVertex objects to be removed. This situation is similar to a common coding error in Python where items are removed from a list within a loop over the list item: for x in somelist:
somelist.remove(x)Since Python programmers are familiar with this common mistake, it would appear straightforward that verticesToRemove is a genuine list or tuple. kjym3: I agree with you that using generators when possible is a good practice.
Another concern I got… | |||||
| while i < sv_nb and not it.is_end: | points = tuple(p + rand for (p, rand) in zip(points, randomization_mat)) | ||||
| else: | |||||
| points = tuple(p for p in points) | |||||
Not Done Inline ActionsThis assignment seems redundant. kjym3: This assignment seems redundant. | |||||
| # substract even from uneven; result is length four tuple of vectors | |||||
| old_vecs = tuple(points[i + 1] - points[i] for i in range(0, 7, 2)) | |||||
| it = iter(stroke) | |||||
| verticesToRemove = list() | |||||
| for j in range(self.__turns): | |||||
| for i, svert in zip(range(num_segments), it): | |||||
| if i < first: | if i < first: | ||||
| p_new = p_first + vec_first * float(i)/float(first - 1) | svert.point = points[0] + old_vecs[0] * i / (first - 1) | ||||
| if i == first - 1: | svert.attribute.visible = (i != first - 1) | ||||
| visible = False | |||||
| elif i < second: | elif i < second: | ||||
| p_new = p_second + vec_second * float(i - first)/float(second - first - 1) | svert.point = points[2] + old_vecs[1] * (i - first) / (second - first - 1) | ||||
| if i == second - 1: | svert.attribute.visible = (i != second - 1) | ||||
| visible = False | |||||
| elif i < third: | elif i < third: | ||||
| p_new = p_third + vec_third * float(i - second)/float(third - second - 1) | svert.point = points[4] + old_vecs[2] * (i - second) / (third - second - 1) | ||||
| if i == third - 1: | svert.attribute.visible = (i != third - 1) | ||||
| visible = False | elif i < fourth: | ||||
| svert.point = points[6] + old_vecs[3] * (i - third) / (fourth - third - 1) | |||||
| svert.attribute.visible = (i != fourth - 1) | |||||
| else: | else: | ||||
| p_new = p_fourth + vec_fourth * float(i - third)/float(fourth - third - 1) | # special case; remove these vertices | ||||
Not Done Inline ActionsI guess p_mean = (1 / n) * sum(svert.point for svert in stroke) would suffice? kjym3: I guess `p_mean = (1 / n) * sum(svert.point for svert in stroke)` would suffice? | |||||
Not Done Inline ActionsIt doesn't, sadly. it raises: flokkievids: It doesn't, sadly. it raises:
AttributeError: Vector addition: (int + Vector) invalid type for… | |||||
Not Done Inline ActionsOk, sum() takes two arguments and the second is the initial value, which in this case should be Vector((0, 0)). No problem then. kjym3: Ok, sum() takes two arguments and the second is the initial value, which in this case should be… | |||||
| if i == fourth - 1: | verticesToRemove.append(svert) | ||||
| visible = False | |||||
| if it.object is None: | # remove exessive vertices | ||||
| i = i + 1 | if not it.is_end: | ||||
| it.increment() | |||||
| if not visible: | |||||
| visible = True | |||||
| continue | |||||
| it.object.point = p_new | |||||
| it.object.attribute.visible = visible | |||||
| if not visible: | |||||
| visible = True | |||||
| i = i + 1 | |||||
| it.increment() | |||||
| verticesToRemove = [] | |||||
| while not it.is_end: | |||||
| verticesToRemove.append(it.object) | |||||
| it.increment() | it.increment() | ||||
| for sv in verticesToRemove: | verticesToRemove += [svert for svert in it] | ||||
| stroke.remove_vertex(sv) | print(len(verticesToRemove)) | ||||
Not Done Inline ActionsPlease, remove this debug print. kjym3: Please, remove this debug print. | |||||
| for sv in verticesToRemove: | |||||
| stroke.remove_vertex(sv) | |||||
| stroke.update_length() | stroke.update_length() | ||||
| # needs a docstring | |||||
| class pyBluePrintDirectedSquaresShader(StrokeShader): | class pyBluePrintDirectedSquaresShader(StrokeShader): | ||||
| """ | |||||
| Replaces the stroke with a directed square | |||||
| """ | |||||
| def __init__(self, turns=1, bb_len=10, mult=1): | def __init__(self, turns=1, bb_len=10, mult=1): | ||||
| StrokeShader.__init__(self) | StrokeShader.__init__(self) | ||||
| self.__mult = mult | self.__mult = mult | ||||
| self.__turns = turns | self.__turns = turns | ||||
| self.__bb_len = 1 + float(bb_len) / 100 | self.__bb_len = 1 + float(bb_len) * 0.01 | ||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| stroke.resample(32 * self.__turns) | stroke.resample(32 * self.__turns) | ||||
| fac = 1 / len(stroke) | |||||
| p_mean = Vector((0.0, 0.0)) | p_mean = Vector((0.0, 0.0)) | ||||
| it = stroke.stroke_vertices_begin() | for svert in stroke: | ||||
| while not it.is_end: | p_mean += svert.point | ||||
| p = it.object.point | p_mean *= fac | ||||
| p_mean = p_mean + p | |||||
| it.increment() | p_var = Vector((0.0, 0.0)) | ||||
| sv_nb = stroke.stroke_vertices_size() | p_var_xy = 0.0 | ||||
| p_mean = p_mean / sv_nb | for svert in stroke: | ||||
| p_var_xx = 0 | d = svert.point - p_mean | ||||
| p_var_yy = 0 | p_var += Vector((d.x ** 2, d.y ** 2)) | ||||
| p_var_xy = 0 | p_var_xy += d.x * d.y | ||||
| it = stroke.stroke_vertices_begin() | |||||
| while not it.is_end: | # divide by number of vertices | ||||
| p = it.object.point | p_var.x *= fac | ||||
| p_var_xx = p_var_xx + pow(p.x - p_mean.x, 2) | p_var.y *= fac | ||||
| p_var_yy = p_var_yy + pow(p.y - p_mean.y, 2) | p_var_xy *= fac | ||||
| p_var_xy = p_var_xy + (p.x - p_mean.x) * (p.y - p_mean.y) | trace = p_var.x + p_var.y | ||||
| it.increment() | det = p_var.x * p_var.y - pow(p_var_xy, 2) | ||||
| p_var_xx = p_var_xx / sv_nb | |||||
| p_var_yy = p_var_yy / sv_nb | |||||
| p_var_xy = p_var_xy / sv_nb | |||||
| ## print(p_var_xx, p_var_yy, p_var_xy) | |||||
| trace = p_var_xx + p_var_yy | |||||
| det = p_var_xx * p_var_yy - p_var_xy * p_var_xy | |||||
| sqrt_coeff = sqrt(trace * trace - 4 * det) | sqrt_coeff = sqrt(trace * trace - 4 * det) | ||||
| lambda1 = (trace + sqrt_coeff) / 2 | lambda1, lambda2 = (trace + sqrt_coeff) / 2, (trace - sqrt_coeff) / 2 | ||||
| lambda2 = (trace - sqrt_coeff) / 2 | # make sure those numers aren't to small, rooting them will yield complex numbers | ||||
| ## print(lambda1, lambda2) | lambda1, lambda2 = max(1e-12, lambda1), max(1e-12, lambda2) | ||||
| theta = atan(2 * p_var_xy / (p_var_xx - p_var_yy)) / 2 | theta = atan(2 * p_var_xy / (p_var.x - p_var.y)) / 2 | ||||
| ## print(theta) | |||||
| if p_var_yy > p_var_xx: | |||||
| e1 = Vector((cos(theta + pi / 2), sin(theta + pi / 2))) * sqrt(lambda1) * self.__mult | if p_var.y > p_var.x: | ||||
| e2 = Vector((cos(theta + pi), sin(theta + pi))) * sqrt(lambda2) * self.__mult | # note: 1.57... = pi / 2 | ||||
| e1 = Vector((cos(theta + pi / 2), sin(theta + pi / 2))) * sqrt(lambda1) * self.__mult | |||||
| e2 = Vector((cos(theta + pi ), sin(theta + pi ))) * sqrt(lambda2) * self.__mult | |||||
| else: | else: | ||||
| e1 = Vector((cos(theta), sin(theta))) * sqrt(lambda1) * self.__mult | e1 = Vector((cos(theta), sin(theta))) * sqrt(lambda1) * self.__mult | ||||
| e2 = Vector((cos(theta + pi / 2), sin(theta + pi / 2))) * sqrt(lambda2) * self.__mult | e2 = Vector((cos(theta + pi / 2), sin(theta + pi / 2))) * sqrt(lambda2) * self.__mult | ||||
| ####################################################### | |||||
| sv_nb = sv_nb // self.__turns | # partition the stroke | ||||
| first = sv_nb // 4 | num_segments = len(stroke) // self.__turns | ||||
| second = 2 * first | f = num_segments // 4 | ||||
| third = 3 * first | # indices of the vertices that will form corners | ||||
| fourth = sv_nb | first, second, third, fourth = (f, f * 2, f * 3, num_segments) | ||||
| bb_len1 = self.__bb_len | bb_len1 = self.__bb_len | ||||
| bb_len2 = 1 + (bb_len1 - 1) * sqrt(lambda1 / lambda2) | bb_len2 = 1 + (bb_len1 - 1) * sqrt(lambda1 / lambda2) | ||||
| p_first = p_mean - e1 - e2 * bb_len2 | points = ( | ||||
| p_second = p_mean - e1 * bb_len1 + e2 | p_mean - e1 - e2 * bb_len2, | ||||
| p_third = p_mean + e1 + e2 * bb_len2 | p_mean - e1 * bb_len1 + e2, | ||||
| p_fourth = p_mean + e1 * bb_len1 - e2 | p_mean + e1 + e2 * bb_len2, | ||||
| vec_first = e2 * bb_len2 * 2 | p_mean + e1 * bb_len1 - e2, | ||||
| vec_second = e1 * bb_len1 * 2 | ) | ||||
| vec_third = vec_first * -1 | |||||
| vec_fourth = vec_second * -1 | old_vecs = ( | ||||
| ####################################################### | e2 * bb_len2 * 2, | ||||
| it = stroke.stroke_vertices_begin() | e1 * bb_len1 * 2, | ||||
| visible = True | -e2 * bb_len2 * 2, | ||||
| for j in range(self.__turns): | -e1 * bb_len1 * 2, | ||||
| i = 0 | ) | ||||
| while i < sv_nb: | |||||
| it = iter(stroke) | |||||
| verticesToRemove = list() | |||||
| for j in range(self.__turns): | |||||
| for i, svert in zip(range(num_segments), it): | |||||
| if i < first: | if i < first: | ||||
| p_new = p_first + vec_first * float(i)/float(first - 1) | svert.point = points[0] + old_vecs[0] * i / (first - 1) | ||||
| if i == first - 1: | svert.attribute.visible = (i != first - 1) | ||||
| visible = False | |||||
| elif i < second: | elif i < second: | ||||
| p_new = p_second + vec_second * float(i - first)/float(second - first - 1) | svert.point = points[1] + old_vecs[1] * (i - first) / (second - first - 1) | ||||
| if i == second - 1: | svert.attribute.visible = (i != second - 1) | ||||
| visible = False | |||||
| elif i < third: | elif i < third: | ||||
| p_new = p_third + vec_third * float(i - second)/float(third - second - 1) | svert.point = points[2] + old_vecs[2] * (i - second) / (third - second - 1) | ||||
| if i == third - 1: | svert.attribute.visible = (i != third - 1) | ||||
| visible = False | elif i < fourth: | ||||
| svert.point = points[3] + old_vecs[3] * (i - third) / (fourth - third - 1) | |||||
| svert.attribute.visible = (i != fourth - 1) | |||||
| else: | else: | ||||
| p_new = p_fourth + vec_fourth * float(i - third)/float(fourth - third - 1) | # special case; remove these vertices | ||||
| if i == fourth - 1: | verticesToRemove.append(svert) | ||||
| visible = False | |||||
| it.object.point = p_new | # remove exessive vertices | ||||
| it.object.attribute.visible = visible | if not it.is_end: | ||||
| if not visible: | |||||
| visible = True | |||||
| i = i + 1 | |||||
| it.increment() | |||||
| verticesToRemove = [] | |||||
| while not it.is_end: | |||||
| verticesToRemove.append(it.object) | |||||
| it.increment() | it.increment() | ||||
| for sv in verticesToRemove: | verticesToRemove += [svert for svert in it] | ||||
| stroke.remove_vertex(sv) | for sv in verticesToRemove: | ||||
| stroke.remove_vertex(sv) | |||||
| stroke.update_length() | stroke.update_length() | ||||
| class pyModulateAlphaShader(StrokeShader): | ## various (used in the parameter editor) | ||||
| class RoundCapShader(StrokeShader): | |||||
| """ | """ | ||||
| Limits the stroke's alpha between a min and max value | Reshapes curve caps into a round shape | ||||
| """ | """ | ||||
| def __init__(self, min=0, max=1): | def round_cap_thickness(self, x): | ||||
| StrokeShader.__init__(self) | x = max(0.0, min(x, 1.0)) | ||||
| self.__min = min | return sqrt(1.0 - (x ** 2.0)) | ||||
| self.__max = max | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | if len(stroke) < 2: | ||||
| while not it.is_end: | return None | ||||
| alpha = it.object.attribute.alpha | # save the location and attribute of stroke vertices | ||||
| p = it.object.point | buffer = tuple((sv.point, StrokeAttribute(sv.attribute)) for sv in stroke) | ||||
| alpha = alpha * p.y / 400 | nverts = len(buffer) | ||||
| if alpha < self.__min: | # calculate the number of additional vertices to form caps | ||||
| alpha = self.__min | R, L = stroke[0].attribute.thickness | ||||
| elif alpha > self.__max: | caplen_beg = (R + L) / 2.0 | ||||
| alpha = self.__max | nverts_beg = max(5, int(R + L)) | ||||
| it.object.attribute.alpha = alpha | |||||
| it.increment() | R, L = stroke[-1].attribute.thickness | ||||
| caplen_end = (R + L) / 2.0 | |||||
| nverts_end = max(5, int(R + L)) | |||||
| # adjust the total number of stroke vertices | |||||
| stroke.resample(len(stroke) + nverts_beg + nverts_end) | |||||
| # restore the location and attribute of the original vertices | |||||
| for i, (p, attr) in enumerate(buffer): | |||||
| stroke[nverts_beg + i].point = p | |||||
| stroke[nverts_beg + i].attribute = attr | |||||
| # reshape the cap at the beginning of the stroke | |||||
| q, attr = buffer[1] | |||||
| p, attr = buffer[0] | |||||
| d = (p - q) | |||||
| # prevent division by zero | |||||
| d = (d / d.length * caplen_beg) if d.length else Vector((0.0, 0.0)) | |||||
| n = 1.0 / nverts_beg | |||||
| R, L = attr.thickness | |||||
| for i, svert in zip(range(nverts_beg), stroke): | |||||
| t = (nverts_beg - i) * n | |||||
| r = self.round_cap_thickness(t + n) | |||||
| svert.point = p + d * t | |||||
| svert.attribute = attr | |||||
| svert.attribute.thickness = (R * r, L * r) | |||||
| # reshape the cap at the end of the stroke | |||||
| q, attr = buffer[-2] | |||||
| p, attr = buffer[-1] | |||||
| d = (p - q) | |||||
| # prevent division by zero | |||||
| d = (d / d.length * caplen_end) if d.length else Vector((0.0, 0.0)) | |||||
| n = 1.0 / nverts_end | |||||
| R, L = attr.thickness | |||||
| for i in range(nverts_end): | |||||
| svert = stroke[-i - 1] | |||||
| t = (nverts_end - i) * n | |||||
| r = self.round_cap_thickness(t + n) | |||||
| svert.point = p + d * t | |||||
| svert.attribute = attr | |||||
| svert.attribute.thickness = (R * r, L * r) | |||||
| # update the curvilinear 2D length of each vertex | |||||
| stroke.update_length() | |||||
| ## various | class SquareCapShader(StrokeShader): | ||||
| class pyDummyShader(StrokeShader): | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| it = stroke.stroke_vertices_begin() | if len(stroke) < 2: | ||||
| while not it.is_end: | return None | ||||
| toto = Interface0DIterator(it) | |||||
| att = it.object.attribute | # save the location and attribute of stroke vertices | ||||
| att.color = (0.3, 0.4, 0.4) | buffer = tuple((sv.point, StrokeAttribute(sv.attribute)) for sv in stroke) | ||||
| att.thickness = (0, 5) | nverts = len(buffer) | ||||
| it.increment() | # calculate the number of additional vertices to form caps | ||||
| R, L = stroke[0].attribute.thickness | |||||
| caplen_beg = (R + L) / 2.0 | |||||
| nverts_beg = 1 | |||||
| R, L = stroke[-1].attribute.thickness | |||||
| caplen_end = (R + L) / 2.0 | |||||
| nverts_end = 1 | |||||
| # adjust the total number of stroke vertices (in this case: add two) | |||||
| stroke.resample(nverts + nverts_beg + nverts_end) | |||||
| # restore the location and attribute of the original vertices | |||||
| for i in range(nverts): | |||||
| p, attr = buffer[i] | |||||
| stroke[nverts_beg + i].point = p | |||||
| stroke[nverts_beg + i].attribute = attr | |||||
| # reshape the cap at the beginning of the stroke | |||||
| q, attr = buffer[1] | |||||
| p, attr = buffer[0] | |||||
| d = p - q | |||||
| d = (d / d.length * caplen_end) if d.length else Vector((0.0, 0.0)) | |||||
| stroke[0].point = p + d | |||||
| stroke[0].attribute = attr | |||||
| # reshape the cap at the end of the stroke | |||||
| q, attr = buffer[-2] | |||||
| p, attr = buffer[-1] | |||||
| d = p - q | |||||
| d = (d / d.length * caplen_beg) if d.length else Vector((0.0, 0.0)) | |||||
| stroke[-1].point = p + d | |||||
| stroke[-1].attribute = attr | |||||
| # update the curvilinear 2D length of each vertex | |||||
| stroke.update_length() | |||||
| class strokeAsBezierShader(StrokeShader): | |||||
| def __init__(self): | |||||
| self.i = 0 | |||||
| StrokeShader.__init__(self) | |||||
| class pyDebugShader(StrokeShader): | |||||
| def shade(self, stroke): | def shade(self, stroke): | ||||
| fe = CF.get_selected_fedge() | if self.i == 0 or 1: | ||||
| id1 = fe.first_svertex.id | # create bezier curve object | ||||
| id2 = fe.second_svertex.id | bpy.ops.curve.primitive_nurbs_path_add() | ||||
| #print(id1.first, id1.second) | curve = bpy.data.curves[-1] | ||||
| #print(id2.first, id2.second) | # the primitive has 2 points, extend to stroke length | ||||
| it = stroke.stroke_vertices_begin() | curve.splines[0].points.add(len(stroke) - 5) | ||||
| found = True | |||||
| foundfirst = True | print(len(stroke), len(curve.splines[0].points)) | ||||
| foundsecond = False | |||||
| while not it.is_end: | for svert, curve_p in zip(stroke, curve.splines[0].points): | ||||
| cp = it.object | curve_p.co = svert.point.to_4d() * .001 | ||||
| if cp.first_svertex.id == id1 or cp.second_svertex.id == id1: | |||||
| foundfirst = True | self.i += 1 | ||||
| if cp.first_svertex.id == id2 or cp.second_svertex.id == id2: | |||||
| foundsecond = True | class CurvatureThicknessShader(StrokeShader): | ||||
| if foundfirst and foundsecond: | def __init__(self, min=4, max=10): | ||||
| found = True | StrokeShader.__init__(self) | ||||
| break | self.t_max = max | ||||
| it.increment() | self.t_min = min | ||||
| if found: | |||||
| print("The selected Stroke id is: ", stroke.id.first, stroke.id.second) | def shade(self, stroke): | ||||
| if len(stroke) < 3: | |||||
| # maximum curvature | |||||
| for svert in stroke: | |||||
| svert.attribute.thickness = (self.t_max / 2, self.t_max / 2) | |||||
| return None | |||||
| for svert, K in zip(stroke, curvature(stroke)): | |||||
| c = (1 - K) * self.t_max + K * self.t_min | |||||
| svert.attribute.thickness = (c / 2, c / 2) | |||||
| class CurvatureColorShader(StrokeShader): | |||||
| def __init__(self, min=(0, 1, 0), max=(1, 0, 0)): | |||||
| StrokeShader.__init__(self) | |||||
| self.t_max = Color(max) | |||||
| self.t_min = Color(min) | |||||
| def shade(self, stroke): | |||||
| if len(stroke) < 3: | |||||
| # maximum curvature | |||||
| for svert in stroke: | |||||
| svert.attribute.color = self.t_max | |||||
| return None | |||||
| for svert, K in zip(stroke, curvature(stroke)): | |||||
| svert.attribute.color = (1 - K) * self.t_max + K * self.t_min | |||||
| Context not available. | |||||
Not Done Inline ActionsIs this strokeAsBezierShader class meant to be here? kjym3: Is this `strokeAsBezierShader` class meant to be here? | |||||
Not Done Inline ActionsThese 3 lines look like testing stuff. Please, remove them if they are unnecessary for a release. kjym3: These 3 lines look like testing stuff. Please, remove them if they are unnecessary for a… | |||||
Not Done Inline ActionsNo. this was an experiment with your similar shader. we can look into inclusion of such a shader after this patch gets applied. flokkievids: No. this was an experiment with your similar shader. we can look into inclusion of such a… | |||||
Please, consider keeping the original order of classes to make code review easier.