Changeset View
Standalone View
release/scripts/freestyle/modules/freestyle/utils.py
| Context not available. | |||||
| integrate, | integrate, | ||||
| ) | ) | ||||
| from freestyle.types import ( | |||||
| Interface0DIterator, | |||||
| Stroke, | |||||
| StrokeVertexIterator, | |||||
| ) | |||||
| from mathutils import Vector | from mathutils import Vector | ||||
| from functools import lru_cache | from functools import lru_cache | ||||
| from math import cos, sin, pi | from math import cos, sin, pi | ||||
| from itertools import tee | |||||
| # -- real utility functions -- # | # -- real utility functions -- # | ||||
| def rgb_to_bw(r, g, b): | def rgb_to_bw(r, g, b): | ||||
| """ Method to convert rgb to a bw intensity value. """ | """ Method to convert rgb to a bw intensity value. """ | ||||
| return 0.35 * r + 0.45 * g + 0.2 * b | return 0.35 * r + 0.45 * g + 0.2 * b | ||||
| Context not available. | |||||
| x, y = zip(*(svert.point for svert in stroke)) | x, y = zip(*(svert.point for svert in stroke)) | ||||
| return (Vector((min(x), min(y))), Vector((max(x), max(y)))) | return (Vector((min(x), min(y))), Vector((max(x), max(y)))) | ||||
| # -- General helper functions -- # | # -- General helper functions -- # | ||||
| Context not available. | |||||
| # -- helper functions for chaining -- # | # -- helper functions for chaining -- # | ||||
| def get_chain_length(ve, orientation): | def get_chain_length(ve, orientation): | ||||
| """Returns the 2d length of a given ViewEdge """ | """Returns the 2d length of a given ViewEdge """ | ||||
| from freestyle.chainingiterators import pyChainSilhouetteGenericIterator | from freestyle.chainingiterators import pyChainSilhouetteGenericIterator | ||||
| Context not available. | |||||
| def find_matching_vertex(id, it): | def find_matching_vertex(id, it): | ||||
| """Finds the matching vertexn, or returns None """ | """Finds the matching vertex, or returns None """ | ||||
| return next((ve for ve in it if ve.id == id), None) | return next((ve for ve in it if ve.id == id), None) | ||||
| # -- helper functions for iterating -- # | # -- helper functions for iterating -- # | ||||
| def pairwise(iterable, types={Stroke, StrokeVertexIterator}): | |||||
| """Yields a tuple containing the previous and current object """ | |||||
| # use .incremented() for types that support it | |||||
| if type(iterable) in types: | |||||
| it = iter(iterable) | |||||
kjym3: Is there any advantage in using .incremented()? If not, always using tee() seems fine. | |||||
Not Done Inline ActionsAs I understand it, tee stores (at least parts of) the iterator objects in memory. tee itself has some overhead as well. Incremented() gives a new iterator, no copying is done. Tests also indicated that it is slightly faster. If the above is true, it is also more efficient memory-wise. (This is also in agreement with Alex Gaynor's Fast Python, Slow Python talk, in which he states that the best tool for the job is almost always the most specific one.) flokkievids: As I understand it, tee stores (at least parts of) the iterator objects in memory. tee itself… | |||||
Not Done Inline ActionsHow about using hasattr(iterable, "incremented") then instead of listing class names? Another option is just to assume that iterable has the incremented() method and catch AttributeError. kjym3: How about using `hasattr(iterable, "incremented")` then instead of listing class names? Another… | |||||
| return zip(it, it.incremented()) | |||||
| else: | |||||
| a, b = tee(iterable) | |||||
| next(b, None) | |||||
| return zip(a, b) | |||||
| def iter_current_previous(stroke): | |||||
| """ | def tripplewise(iterable): | ||||
| iterates over the given iterator. yields a tuple of the form | """Yields a tuple containing the current object and its immediate neighbors """ | ||||
| (it, prev, current) | a, b, c = tee(iterable) | ||||
| """ | next(b, None) | ||||
| prev = stroke[0] | next(c, None) | ||||
| it = Interface0DIterator(stroke) | return zip(a, b, c) | ||||
| for current in it: | |||||
| yield (it, prev, current) | |||||
| def iter_t2d_along_stroke(stroke): | def iter_t2d_along_stroke(stroke): | ||||
| """ | """ Yields the progress along the stroke """ | ||||
| Yields the distance between two stroke vertices | |||||
| relative to the total stroke length. | |||||
| """ | |||||
| total = stroke.length_2d | total = stroke.length_2d | ||||
| distance = 0.0 | distance = 0.0 | ||||
| for it, prev, svert in iter_current_previous(stroke): | # yield for the comparison from the first vertex to itself | ||||
| yield 0.0 | |||||
| for prev, svert in pairwise(stroke): | |||||
| distance += (prev.point - svert.point).length | distance += (prev.point - svert.point).length | ||||
| t = min(distance / total, 1.0) if total > 0.0 else 0.0 | yield min(distance / total, 1.0) if total != 0.0 else 0.0 | ||||
| yield (it, t) | |||||
| def iter_distance_from_camera(stroke, range_min, range_max): | def iter_distance_from_camera(stroke, range_min, range_max, normfac): | ||||
| """ | """ | ||||
| Yields the distance to the camera relative to the maximum | Yields the distance to the camera relative to the maximum | ||||
| possible distance for every stroke vertex, constrained by | possible distance for every stroke vertex, constrained by | ||||
| given minimum and maximum values. | given minimum and maximum values. | ||||
| """ | """ | ||||
| normfac = range_max - range_min # normalization factor | for svert in stroke: | ||||
| it = Interface0DIterator(stroke) | # length in the camera coordinate | ||||
| for svert in it: | distance = svert.point_3d.length | ||||
| distance = svert.point_3d.length # in the camera coordinate | if range_min < distance < range_max: | ||||
| if distance < range_min: | yield (svert, (distance - range_min) / normfac) | ||||
kjym3Unsubmitted Not Done Inline ActionsJust for curiosity, do you have a performance difference if division by normfac is replaced by a multiplication by 1/normfac? normfac = 1.0 / normfac
for svert in stroke:
...
yield (svert, (distance - range_min) * normfac)kjym3: Just for curiosity, do you have a performance difference if division by `normfac` is replaced… | |||||
flokkievidsAuthorUnsubmitted Not Done Inline ActionsWith 100K iterations, division is 0.005 seconds slower than multiplication. Not worth it in my opinion. flokkievids: With 100K iterations, division is 0.005 seconds slower than multiplication. Not worth it in my… | |||||
| t = 0.0 | |||||
| elif distance > range_max: | |||||
| t = 1.0 | |||||
| else: | else: | ||||
| t = (distance - range_min) / normfac | yield (svert, 0.0) if range_min > distance else (svert, 1.0) | ||||
| yield (it, t) | |||||
| def iter_distance_from_object(stroke, object, range_min, range_max): | def iter_distance_from_object(stroke, location, range_min, range_max, normfac): | ||||
| """ | """ | ||||
| yields the distance to the given object relative to the maximum | yields the distance to the given object relative to the maximum | ||||
| possible distance for every stroke vertex, constrained by | possible distance for every stroke vertex, constrained by | ||||
| given minimum and maximum values. | given minimum and maximum values. | ||||
| """ | """ | ||||
| scene = getCurrentScene() | for svert in stroke: | ||||
| mv = scene.camera.matrix_world.copy().inverted() # model-view matrix | distance = (svert.point_3d - location).length # in the camera coordinate | ||||
| loc = mv * object.location # loc in the camera coordinate | if range_min < distance < range_max: | ||||
| normfac = range_max - range_min # normalization factor | yield (svert, (distance - range_min) / normfac) | ||||
| it = Interface0DIterator(stroke) | |||||
| for svert in it: | |||||
| distance = (svert.point_3d - loc).length # in the camera coordinate | |||||
| if distance < range_min: | |||||
| t = 0.0 | |||||
| elif distance > range_max: | |||||
| t = 1.0 | |||||
| else: | else: | ||||
| t = (distance - range_min) / normfac | yield (svert, 0.0) if distance < range_min else (svert, 1.0) | ||||
| yield (it, t) | |||||
| def get_material_value(material, attribute): | |||||
| def iter_material_color(stroke, material_attribute): | "Returns a specific material attribute from the vertex' underlying material. " | ||||
| """ | # main | ||||
| yields the specified material attribute for every stroke vertex. | if attribute == 'DIFF': | ||||
| the material is taken from the object behind the vertex. | return rgb_to_bw(*material.diffuse[0:3]) | ||||
| """ | elif attribute == 'ALPHA': | ||||
| func = CurveMaterialF0D() | return material.diffuse[3] | ||||
| it = Interface0DIterator(stroke) | elif attribute == 'SPEC': | ||||
| for inter in it: | return rgb_to_bw(*material.specular[0:3]) | ||||
| material = func(it) | elif material_attribute == 'LINE': | ||||
| if material_attribute == 'DIFF': | return rgb_to_bw(*material.line[0:3]) | ||||
| color = material.diffuse[0:3] | # line seperate | ||||
| elif material_attribute == 'SPEC': | elif material_attribute == 'LINE_R': | ||||
| color = material.specular[0:3] | return material.line[0] | ||||
| else: | elif material_attribute == 'LINE_G': | ||||
| raise ValueError("unexpected material attribute: " + material_attribute) | return material.line[1] | ||||
| yield (it, color) | elif material_attribute == 'LINE_B': | ||||
| return material.line[2] | |||||
| elif material_attribute == 'ALPHA': | |||||
| def iter_material_value(stroke, material_attribute): | return material.line[3] | ||||
| """ | # diffuse seperate | ||||
| yields a specific material attribute | elif attribute == 'DIFF_R': | ||||
| from the vertex' underlying material. | return material.diffuse[0] | ||||
| """ | elif attribute == 'DIFF_G': | ||||
| func = CurveMaterialF0D() | return material.diffuse[1] | ||||
| elif attribute == 'DIFF_B': | |||||
| return material.diffuse[2] | |||||
| # specular seperate | |||||
| elif attribute == 'SPEC_R': | |||||
| return material.specular[0] | |||||
| elif attribute == 'SPEC_G': | |||||
| return material.specular[1] | |||||
| elif attribute == 'SPEC_B': | |||||
| return material.specular[2] | |||||
| elif attribute == 'SPEC_HARDNESS': | |||||
| return material.shininess | |||||
| else: | |||||
| raise ValueError("unexpected material attribute: " + attribute) | |||||
| def iter_material_value(stroke, func, attribute): | |||||
| "Returns a specific material attribute from the vertex' underlying material. " | |||||
| it = Interface0DIterator(stroke) | it = Interface0DIterator(stroke) | ||||
| for svert in it: | for svert in it: | ||||
| material = func(it) | material = func(it) | ||||
| if material_attribute == 'DIFF': | # main | ||||
| t = rgb_to_bw(*material.diffuse[0:3]) | if attribute == 'DIFF': | ||||
| elif material_attribute == 'DIFF_R': | value = rgb_to_bw(*material.diffuse[0:3]) | ||||
| t = material.diffuse[0] | elif attribute == 'ALPHA': | ||||
| elif material_attribute == 'DIFF_G': | value = material.diffuse[3] | ||||
| t = material.diffuse[1] | elif attribute == 'SPEC': | ||||
| elif material_attribute == 'DIFF_B': | value = rgb_to_bw(*material.specular[0:3]) | ||||
| t = material.diffuse[2] | # diffuse seperate | ||||
| elif material_attribute == 'SPEC': | elif attribute == 'DIFF_R': | ||||
| t = rgb_to_bw(*material.specular[0:3]) | value = material.diffuse[0] | ||||
| elif material_attribute == 'SPEC_R': | elif attribute == 'DIFF_G': | ||||
| t = material.specular[0] | value = material.diffuse[1] | ||||
| elif material_attribute == 'SPEC_G': | elif attribute == 'DIFF_B': | ||||
| t = material.specular[1] | value = material.diffuse[2] | ||||
| elif material_attribute == 'SPEC_B': | # specular seperate | ||||
| t = material.specular[2] | elif attribute == 'SPEC_R': | ||||
| elif material_attribute == 'SPEC_HARDNESS': | value = material.specular[0] | ||||
| t = material.shininess | elif attribute == 'SPEC_G': | ||||
| elif material_attribute == 'ALPHA': | value = material.specular[1] | ||||
| t = material.diffuse[3] | elif attribute == 'SPEC_B': | ||||
| value = material.specular[2] | |||||
| elif attribute == 'SPEC_HARDNESS': | |||||
| value = material.shininess | |||||
| else: | else: | ||||
| raise ValueError("unexpected material attribute: " + material_attribute) | raise ValueError("unexpected material attribute: " + attribute) | ||||
| yield (it, t) | yield (svert, value) | ||||
| def iter_distance_along_stroke(stroke): | def iter_distance_along_stroke(stroke): | ||||
| """ | "Yields the absolute distance along the stroke up to the current vertex." | ||||
| yields the absolute distance between | |||||
| the current and preceding vertex. | |||||
| """ | |||||
| distance = 0.0 | distance = 0.0 | ||||
| prev = stroke[0] | # the positions need to be copied, because they are changed in the calling function | ||||
| it = Interface0DIterator(stroke) | points = tuple(svert.point.copy() for svert in stroke) | ||||
| for svert in it: | yield distance | ||||
| p = svert.point | for prev, curr in pairwise(points): | ||||
| distance += (prev - p).length | distance += (prev - curr).length | ||||
| prev = p.copy() # need a copy because the point can be altered | yield distance | ||||
| yield it, distance | |||||
| def iter_triplet(it): | |||||
| """ | |||||
| Iterates over it, yielding a tuple containing | |||||
| the current vertex and its immediate neighbors | |||||
| """ | |||||
| prev = next(it) | |||||
| current = next(it) | |||||
| for succ in it: | |||||
| yield prev, current, succ | |||||
| prev, current = current, succ | |||||
| # -- mathmatical operations -- # | # -- mathmatical operations -- # | ||||
| Context not available. | |||||
| K = 1 / R | K = 1 / R | ||||
| where R is the radius of the circle going through the current vertex and its neighbors | where R is the radius of the circle going through the current vertex and its neighbors | ||||
| """ | """ | ||||
| for _ in it: | |||||
| if (it.is_begin or it.is_end): | |||||
| yield 0.0 | |||||
| continue | |||||
| else: | |||||
| it.decrement() | |||||
| prev, current, succ = it.object.point.copy(), next(it).point.copy(), next(it).point.copy() | |||||
| # return the iterator in an unchanged state | |||||
| it.decrement() | |||||
Not Done Inline ActionsThe slower version can be removed. kjym3: The slower version can be removed. | |||||
| if it.is_end or it.is_begin: | ab = (current - prev) | ||||
| return 0.0 | bc = (succ - current) | ||||
| ac = (prev - succ) | |||||
| next = it.incremented().point | |||||
| prev = it.decremented().point | |||||
| current = it.object.point | |||||
| ab = (current - prev) | |||||
| bc = (next - current) | |||||
| ac = (prev - next) | |||||
| a, b, c = ab.length, bc.length, ac.length | |||||
| try: | |||||
| area = 0.5 * ab.cross(ac) | |||||
| K = (4 * area) / (a * b * c) | |||||
| K = bound(0.0, K, 1.0) | |||||
| except ZeroDivisionError: | a, b, c = ab.length, bc.length, ac.length | ||||
| K = 0.0 | |||||
| return K | try: | ||||
| area = 0.5 * ab.cross(ac) | |||||
| K = (4 * area) / (a * b * c) | |||||
| except ZeroDivisionError: | |||||
| K = 0.0 | |||||
| yield abs(K) | |||||
| def stroke_normal(it): | def stroke_normal(stroke): | ||||
| """ | """ | ||||
| Compute the 2D normal at the stroke vertex pointed by the iterator | Compute the 2D normal at the stroke vertex pointed by the iterator | ||||
| 'it'. It is noted that Normal2DF0D computes normals based on | 'it'. It is noted that Normal2DF0D computes normals based on | ||||
| underlying FEdges instead, which is inappropriate for strokes when | underlying FEdges instead, which is inappropriate for strokes when | ||||
| they have already been modified by stroke geometry modifiers. | they have already been modified by stroke geometry modifiers. | ||||
| The returned normals are dynamic: they update when the | |||||
| vertex position (and therefore the vertex normal) changes. | |||||
| for use in geometry modifiers it is advised to | |||||
| cast this generator function to a tuple or list | |||||
| returns a list of normals | |||||
| """ | """ | ||||
| # first stroke segment | n = len(stroke) - 1 | ||||
| it_next = it.incremented() | |||||
| if it.is_begin: | for i, svert in enumerate(stroke): | ||||
| e = it_next.object.point_2d - it.object.point_2d | if i == 0: | ||||
| n = Vector((e[1], -e[0])) | e = stroke[i + 1].point - svert.point | ||||
| return n.normalized() | yield Vector((e[1], -e[0])).normalized() | ||||
| # last stroke segment | elif i == n: | ||||
| it_prev = it.decremented() | e = svert.point - stroke[i - 1].point | ||||
| if it_next.is_end: | yield Vector((e[1], -e[0])).normalized() | ||||
| e = it.object.point_2d - it_prev.object.point_2d | else: | ||||
| n = Vector((e[1], -e[0])) | e1 = stroke[i + 1].point - svert.point | ||||
| return n.normalized() | e2 = svert.point - stroke[i - 1].point | ||||
| # two subsequent stroke segments | n1 = Vector((e1[1], -e1[0])).normalized() | ||||
| e1 = it_next.object.point_2d - it.object.point_2d | n2 = Vector((e2[1], -e2[0])).normalized() | ||||
| e2 = it.object.point_2d - it_prev.object.point_2d | yield (n1 + n2).normalized() | ||||
| n1 = Vector((e1[1], -e1[0])).normalized() | |||||
| n2 = Vector((e2[1], -e2[0])).normalized() | def get_test_stroke(): | ||||
| n = (n1 + n2) | """Returns a static stroke object for testing """ | ||||
| return n.normalized() | from freestyle.types import Stroke, Interface0DIterator, StrokeVertexIterator, SVertex, Id, StrokeVertex | ||||
| # points for our fake stroke | |||||
| points = (Vector((1.0, 5.0, 3.0)), Vector((1.0, 2.0, 9.0)), | |||||
| Vector((6.0, 2.0, 3.0)), Vector((7.0, 2.0, 3.0)), | |||||
| Vector((2.0, 6.0, 3.0)), Vector((2.0, 8.0, 3.0))) | |||||
| ids = (Id(0, 0), Id(1, 1), Id(2, 2), Id(3, 3), Id(4, 4), Id(5, 5)) | |||||
| stroke = Stroke() | |||||
| it = iter(stroke) | |||||
| for svert in map(SVertex, points, ids): | |||||
| stroke.insert_vertex(StrokeVertex(svert), it) | |||||
| it = iter(stroke) | |||||
| stroke.update_length() | |||||
| return stroke | |||||
| No newline at end of file | |||||
| Context not available. | |||||
Is there any advantage in using .incremented()? If not, always using tee() seems fine.