Changeset View
Standalone View
release/scripts/freestyle/modules/freestyle/utils.py
| Context not available. | |||||
| integrate, | integrate, | ||||
| ) | ) | ||||
| from freestyle.types import ( | |||||
| Interface0DIterator, | |||||
| ) | |||||
| from mathutils import Vector | from mathutils import Vector | ||||
| from functools import lru_cache | from functools import lru_cache | ||||
| from math import cos, sin, pi | from math import cos, sin, pi | ||||
| from itertools import tee | |||||
| # -- real utility functions -- # | # -- real utility functions -- # | ||||
def rgb_to_bw(r, g, b):
    """Convert an RGB color to a single black-and-white intensity value.

    :arg r, g, b: color channels, each expected in the [0, 1] range.
    :return: weighted luminance; the weights (0.35, 0.45, 0.2) sum to 1,
        so a neutral gray (r == g == b) maps to that same value.
    """
    return 0.35 * r + 0.45 * g + 0.2 * b
| Context not available. | |||||
| x, y = zip(*(svert.point for svert in stroke)) | x, y = zip(*(svert.point for svert in stroke)) | ||||
| return (Vector((min(x), min(y))), Vector((max(x), max(y)))) | return (Vector((min(x), min(y))), Vector((max(x), max(y)))) | ||||
| # -- General helper functions -- # | # -- General helper functions -- # | ||||
| @lru_cache(maxsize=32) | @lru_cache(maxsize=32) | ||||
| def phase_to_direction(length): | def phase_to_direction(length): | ||||
| """ | """ | ||||
| Context not available. | |||||
| # -- helper functions for chaining -- # | # -- helper functions for chaining -- # | ||||
| def get_chain_length(ve, orientation): | def get_chain_length(ve, orientation): | ||||
| """Returns the 2d length of a given ViewEdge """ | """Returns the 2d length of a given ViewEdge """ | ||||
| from freestyle.chainingiterators import pyChainSilhouetteGenericIterator | from freestyle.chainingiterators import pyChainSilhouetteGenericIterator | ||||
| Context not available. | |||||
| return length | return length | ||||
def find_matching_vertex(id, it):
    """Find the vertex with the given id, or return None if not found.

    :arg id: the Id to look for.
    :arg it: an iterable of objects exposing an ``id`` attribute.
    :return: the first matching element of *it*, or None.
    """
    # NOTE: the parameter `id` shadows the builtin; kept as-is because
    # callers may pass it by keyword.
    return next((ve for ve in it if ve.id == id), None)
| # -- helper functions for iterating -- # | # -- helper functions for iterating -- # | ||||
def pairwise(iterable):
    """Yield (previous, current) tuples over consecutive items of *iterable*.

    For an input of n items, n - 1 pairs are yielded; fewer than two
    items yield nothing.
    """
    a, b = tee(iterable)
    next(b, None)  # advance the second iterator one step ahead
    return zip(a, b)
| def iter_current_previous(stroke): | def tripplewise(iterable): | ||||
| """ | """Yields a tuple containing the current object and its immediate neighbors """ | ||||
| iterates over the given iterator. yields a tuple of the form | a, b, c = tee(iterable) | ||||
kjym3: Is there any advantage in using .incremented() ? If not, always using tee() seems fine. | |||||
Not Done Inline Actionsas I understand it, tee stores (at least parts of) the iterators objects in memory. tee itself has some overhead as well. Incremented() gives a new iterator, no copying is done. Tests also indicated that it is slightly faster. if the above is true, it is also more efficient memory-wise. (this is also in agreement with Alex Gaynor's Fast python, slow python talk, in which he states that the best tool for the job is almost always the most specific one.) flokkievids: as I understand it, tee stores (at least parts of) the iterators objects in memory. tee itself… | |||||
Not Done Inline ActionsHow about using hasattr(iterable, "incremented") then instead of listing class names? Another option is just to assume that iterable has the incremented() method and catch AttributeError. kjym3: How about using `hasattr(iterable, "incremented")` then… | |||||
| (it, prev, current) | next(b, None) | ||||
| """ | next(c, None) | ||||
| prev = stroke[0] | next(c, None) | ||||
| it = Interface0DIterator(stroke) | return zip(a, b, c) | ||||
| for current in it: | |||||
| yield (it, prev, current) | |||||
| def iter_t2d_along_stroke(stroke): | def iter_t2d_along_stroke(stroke): | ||||
| """ | """ Yields the progress along the stroke """ | ||||
| Yields the distance between two stroke vertices | |||||
| relative to the total stroke length. | |||||
| """ | |||||
| total = stroke.length_2d | total = stroke.length_2d | ||||
| distance = 0.0 | distance = 0.0 | ||||
| for it, prev, svert in iter_current_previous(stroke): | # yield for the comparison from the first vertex to itself | ||||
| yield 0.0 | |||||
| for prev, svert in pairwise(stroke): | |||||
| distance += (prev.point - svert.point).length | distance += (prev.point - svert.point).length | ||||
| t = min(distance / total, 1.0) if total > 0.0 else 0.0 | yield min(distance / total, 1.0) if total != 0.0 else 0.0 | ||||
| yield (it, t) | |||||
| def iter_distance_from_camera(stroke, range_min, range_max): | def iter_distance_from_camera(stroke, range_min, range_max): | ||||
| Context not available. | |||||
| possible distance for every stroke vertex, constrained by | possible distance for every stroke vertex, constrained by | ||||
| given minimum and maximum values. | given minimum and maximum values. | ||||
| """ | """ | ||||
| normfac = range_max - range_min # normalization factor | normfac = range_max - range_min | ||||
| it = Interface0DIterator(stroke) | for svert in stroke: | ||||
| for svert in it: | # length in the camera coordinate | ||||
| distance = svert.point_3d.length # in the camera coordinate | distance = svert.point_3d.length | ||||
| if distance < range_min: | if range_min < distance < range_max: | ||||
| t = 0.0 | yield (distance - range_min) / normfac | ||||
| elif distance > range_max: | |||||
| t = 1.0 | |||||
| else: | else: | ||||
| t = (distance - range_min) / normfac | yield 0.0 if range_min > distance else 1.0 | ||||
| yield (it, t) | |||||
| def iter_distance_from_object(stroke, object, range_min, range_max): | def iter_distance_from_object(stroke, location, range_min, range_max): | ||||
| """ | """ | ||||
| yields the distance to the given object relative to the maximum | yields the distance to the given object relative to the maximum | ||||
| possible distance for every stroke vertex, constrained by | possible distance for every stroke vertex, constrained by | ||||
Not Done Inline ActionsJust for curiosity, do you have a performance difference if division by normfac is replaced by a multiplication by 1/normfac? normfac = 1.0 / normfac
for svert in stroke:
...
yield (svert, (distance - range_min) * normfac)kjym3: Just for curiosity, do you have a performance difference if division by `normfac` is replaced… | |||||
Not Done Inline Actionswith 100K iterations, division is .005 seconds slower than multiplication. Not worth it in my opinion. flokkievids: with 100K iterations, division is .005 seconds slower than multiplication. Not worth it in my… | |||||
| given minimum and maximum values. | given minimum and maximum values. | ||||
| """ | """ | ||||
| scene = getCurrentScene() | |||||
| mv = scene.camera.matrix_world.copy().inverted() # model-view matrix | |||||
| loc = mv * object.location # loc in the camera coordinate | |||||
| normfac = range_max - range_min # normalization factor | normfac = range_max - range_min # normalization factor | ||||
| it = Interface0DIterator(stroke) | for svert in stroke: | ||||
| for svert in it: | distance = (svert.point_3d - location).length # in the camera coordinate | ||||
| distance = (svert.point_3d - loc).length # in the camera coordinate | if range_min < distance < range_max: | ||||
| if distance < range_min: | yield (distance - range_min) / normfac | ||||
| t = 0.0 | |||||
| elif distance > range_max: | |||||
| t = 1.0 | |||||
| else: | else: | ||||
| t = (distance - range_min) / normfac | yield 0.0 if distance < range_min else 1.0 | ||||
| yield (it, t) | |||||
| def iter_material_color(stroke, material_attribute): | def get_material_value(material, attribute): | ||||
| """ | """ | ||||
| yields the specified material attribute for every stroke vertex. | Returns a specific material attribute | ||||
| the material is taken from the object behind the vertex. | |||||
| """ | |||||
| func = CurveMaterialF0D() | |||||
| it = Interface0DIterator(stroke) | |||||
| for inter in it: | |||||
| material = func(it) | |||||
| if material_attribute == 'DIFF': | |||||
| color = material.diffuse[0:3] | |||||
| elif material_attribute == 'SPEC': | |||||
| color = material.specular[0:3] | |||||
| else: | |||||
| raise ValueError("unexpected material attribute: " + material_attribute) | |||||
| yield (it, color) | |||||
| def iter_material_value(stroke, material_attribute): | |||||
| """ | |||||
| yields a specific material attribute | |||||
| from the vertex' underlying material. | from the vertex' underlying material. | ||||
| """ | """ | ||||
| func = CurveMaterialF0D() | # main | ||||
| it = Interface0DIterator(stroke) | if attribute == 'DIFF': | ||||
| for svert in it: | return rgb_to_bw(*material.diffuse[0:3]) | ||||
| material = func(it) | elif attribute == 'ALPHA': | ||||
| if material_attribute == 'DIFF': | return material.diffuse[3] | ||||
| t = rgb_to_bw(*material.diffuse[0:3]) | elif attribute == 'SPEC': | ||||
| elif material_attribute == 'DIFF_R': | return rgb_to_bw(*material.specular[0:3]) | ||||
| t = material.diffuse[0] | # diffuse separate | ||||
| elif material_attribute == 'DIFF_G': | elif attribute == 'DIFF_R': | ||||
| t = material.diffuse[1] | return material.diffuse[0] | ||||
| elif material_attribute == 'DIFF_B': | elif attribute == 'DIFF_G': | ||||
| t = material.diffuse[2] | return material.diffuse[1] | ||||
| elif material_attribute == 'SPEC': | elif attribute == 'DIFF_B': | ||||
| t = rgb_to_bw(*material.specular[0:3]) | return material.diffuse[2] | ||||
| elif material_attribute == 'SPEC_R': | # specular separate | ||||
| t = material.specular[0] | elif attribute == 'SPEC_R': | ||||
| elif material_attribute == 'SPEC_G': | return material.specular[0] | ||||
| t = material.specular[1] | elif attribute == 'SPEC_G': | ||||
| elif material_attribute == 'SPEC_B': | return material.specular[1] | ||||
| t = material.specular[2] | elif attribute == 'SPEC_B': | ||||
| elif material_attribute == 'SPEC_HARDNESS': | return material.specular[2] | ||||
| t = material.shininess | elif attribute == 'SPEC_HARDNESS': | ||||
| elif material_attribute == 'ALPHA': | return material.shininess | ||||
| t = material.diffuse[3] | else: | ||||
| else: | raise ValueError("unexpected material attribute: " + attribute) | ||||
| raise ValueError("unexpected material attribute: " + material_attribute) | |||||
| yield (it, t) | |||||
| def iter_distance_along_stroke(stroke): | def iter_distance_along_stroke(stroke): | ||||
| Context not available. | |||||
| the current and preceding vertex. | the current and preceding vertex. | ||||
| """ | """ | ||||
| distance = 0.0 | distance = 0.0 | ||||
| prev = stroke[0] | # the positions need to be copied, because they are changed in the calling function | ||||
| it = Interface0DIterator(stroke) | points = tuple(svert.point.copy() for svert in stroke) | ||||
| for svert in it: | yield distance | ||||
| p = svert.point | for prev, curr in pairwise(points): | ||||
| distance += (prev - p).length | distance += (prev - curr).length | ||||
| prev = p.copy() # need a copy because the point can be altered | yield distance | ||||
| yield it, distance | |||||
def iter_triplet(it):
    """Iterate over *it*, yielding a (prev, current, succ) tuple so that
    each vertex is seen together with its immediate neighbors.

    Yields nothing when the iterator holds fewer than three items.
    """
    try:
        prev = next(it)
        current = next(it)
    except StopIteration:
        # Fewer than two items: a bare StopIteration escaping a generator
        # is turned into a RuntimeError by PEP 479 (Python 3.7+), so
        # return explicitly instead of letting it propagate.
        return
    for succ in it:
        yield prev, current, succ
        prev, current = current, succ
| # -- mathematical operations -- # | # -- mathematical operations -- # | ||||
| def stroke_curvature(it): | def stroke_curvature(it): | ||||
| """ | """ | ||||
| Compute the 2D curvature at the stroke vertex pointed by the iterator 'it'. | Compute the 2D curvature at the stroke vertex pointed by the iterator 'it'. | ||||
| Context not available. | |||||
| return K | return K | ||||
| def stroke_normal(it): | def stroke_normal(stroke): | ||||
| """ | |||||
| Compute the 2D normal at the stroke vertex pointed by the iterator | |||||
| 'it'. It is noted that Normal2DF0D computes normals based on | |||||
| underlying FEdges instead, which is inappropriate for strokes when | |||||
| they have already been modified by stroke geometry modifiers. | |||||
| """ | |||||
| n = len(stroke) - 1 | |||||
| for i, svert in enumerate(stroke): | |||||
| if i == 0: | |||||
| e = stroke[i + 1].point - svert.point | |||||
| yield Vector((e[1], -e[0])).normalized() | |||||
| elif i == n: | |||||
| e = svert.point - stroke[i - 1].point | |||||
| yield Vector((e[1], -e[0])).normalized() | |||||
| else: | |||||
| e1 = stroke[i + 1].point - svert.point | |||||
| e2 = svert.point - stroke[i - 1].point | |||||
| n1 = Vector((e1[1], -e1[0])).normalized() | |||||
| n2 = Vector((e2[1], -e2[0])).normalized() | |||||
| yield (n1 + n2).normalized() | |||||
| # DEPRECATED and unused, the version above is way quicker | |||||
| def stroke_normal1(it): | |||||
| """ | """ | ||||
Not Done Inline ActionsThe slower version can be removed. kjym3: The slower version can be removed. | |||||
| Compute the 2D normal at the stroke vertex pointed by the iterator | Compute the 2D normal at the stroke vertex pointed by the iterator | ||||
| 'it'. It is noted that Normal2DF0D computes normals based on | 'it'. It is noted that Normal2DF0D computes normals based on | ||||
| Context not available. | |||||
| they have already been modified by stroke geometry modifiers. | they have already been modified by stroke geometry modifiers. | ||||
| """ | """ | ||||
| # first stroke segment | # first stroke segment | ||||
| it_next = it.incremented() | #it_next = it.incremented() | ||||
| it_next = Interface0DIterator(it) | |||||
| it_next.increment() | |||||
| if it.is_begin: | if it.is_begin: | ||||
| e = it_next.object.point_2d - it.object.point_2d | e = it_next.object.point_2d - it.object.point_2d | ||||
| n = Vector((e[1], -e[0])) | return Vector((e[1], -e[0])).normalized() | ||||
| return n.normalized() | |||||
| # last stroke segment | # last stroke segment | ||||
| it_prev = it.decremented() | #it_prev = it.decremented() | ||||
| it_prev = Interface0DIterator(it) | |||||
| it_prev.decrement() | |||||
| if it_next.is_end: | if it_next.is_end: | ||||
| e = it.object.point_2d - it_prev.object.point_2d | e = it.object.point_2d - it_prev.object.point_2d | ||||
| n = Vector((e[1], -e[0])) | return Vector((e[1], -e[0])).normalized() | ||||
| return n.normalized() | |||||
| # two subsequent stroke segments | # two subsequent stroke segments | ||||
| e1 = it_next.object.point_2d - it.object.point_2d | e1 = it_next.object.point_2d - it.object.point_2d | ||||
| e2 = it.object.point_2d - it_prev.object.point_2d | e2 = it.object.point_2d - it_prev.object.point_2d | ||||
| n1 = Vector((e1[1], -e1[0])).normalized() | n1 = Vector((e1[1], -e1[0])).normalized() | ||||
| n2 = Vector((e2[1], -e2[0])).normalized() | n2 = Vector((e2[1], -e2[0])).normalized() | ||||
| n = (n1 + n2) | return (n1 + n2).normalized() | ||||
| return n.normalized() | No newline at end of file | ||||
| Context not available. | |||||
Is there any advantage in using .incremented() ? If not, always using tee() seems fine.