Changeset View
Standalone View
release/scripts/freestyle/modules/freestyle/utils.py
| Context not available. | |||||
| integrate, | integrate, | ||||
| ) | ) | ||||
| # constructs for definition of helper functions in Python | from mathutils import Vector | ||||
| from freestyle.types import ( | from functools import lru_cache | ||||
| StrokeVertexIterator, | from math import cos, sin, pi | ||||
| ) | |||||
| import mathutils | |||||
| # -- real utility functions -- # | |||||
def rgb_to_bw(r, g, b):
    """Convert an RGB color to a grayscale (black-and-white) intensity.

    The channels are blended with fixed weights (0.35, 0.45, 0.2); the
    same formula appears in Blender's math_color_inline.c.
    """
    weights = (0.35, 0.45, 0.2)
    return sum(w * channel for w, channel in zip(weights, (r, g, b)))
def bound(lower, x, higher):
    """Clamp x to the closed interval [lower, higher].

    Equivalent to min(max(x, lower), higher); the explicit comparisons
    are kept because they are measurably faster than the min/max form.
    """
    if x <= lower:
        return lower
    if x >= higher:
        return higher
    return x
kjym3: I guess this helper function needs to be renamed to `chromatize` as referred to in the code… | |||||
Not Done Inline Actions — I'm still unsure about the name. Do you have a preference? flokkievids: I'm still unsure about the name. Do you have a preference?
Not Done Inline ActionsI don't really have much preference: rgb_to_grayscale, rgb_to_intensity, chromatize, or whatever makes sense. One option is rgb_to_bw() since the same formula appears in source/blender/blenlib/intern/math_color_inline.c. kjym3: I don't really have much preference: rgb_to_grayscale, rgb_to_intensity, chromatize, or… | |||||
Not Done Inline Actionstook that one flokkievids: took that one | |||||
def bounding_box(stroke):
    """Return the axis-aligned bounding box of the stroke's vertices.

    Returns a (minimum, maximum) pair of 2D Vectors: the lower-left and
    upper-right corners of the box enclosing every vertex point.
    """
    xs = []
    ys = []
    for svert in stroke:
        px, py = svert.point
        xs.append(px)
        ys.append(py)
    return (Vector((min(xs), min(ys))), Vector((max(xs), max(ys))))
Not Done Inline ActionsI saw this helper function also in your regression test suite. Do you actually want to include this in the freestyle.utils module? kjym3: I saw this helper function also in your regression test suite. Do you actually want to include… | |||||
Not Done Inline ActionsI think it's not needed. (pretty nice recipe though) flokkievids: I think it's not needed. (pretty nice recipe though) | |||||
| # -- General helper functions -- # | |||||
@lru_cache(maxsize=32)
def phase_to_direction(length):
    """Sample `length` evenly spaced directions around the unit circle.

    Returns a list of tuples, each containing:
    - the phase, in [0, 1]
    - a Vector with the cosine and sine of 2pi * phase (the direction)

    Fix: the original computed ``i / (length - 1)`` unconditionally and
    raised ZeroDivisionError for length == 1; a single sample is now
    placed at phase 0.0.

    NOTE: because of the lru_cache, repeated calls with the same length
    return the *same* list object — callers must not mutate the result.
    """
    # A single sample must not divide by (length - 1) == 0.
    divisions = max(length - 1, 1)
    results = []
    for i in range(length):
        phase = i / divisions
        results.append((phase, Vector((cos(2 * pi * phase), sin(2 * pi * phase)))))
    return results
| # -- helper functions for chaining -- # | |||||
def get_chain_length(ve, orientation):
    """Returns the 2d length of a given ViewEdge.

    The length is accumulated over the whole chain that *ve* belongs to,
    by walking the chain forward to its end and then backward to its
    beginning with a pyChainSilhouetteGenericIterator.

    :arg ve: the ViewEdge whose chain is measured
    :arg orientation: the chaining orientation passed to the iterator
    :return: sum of length_2d over every edge in the chain (float)
    """
    # Imported here (not at module level) to avoid a circular import
    # between freestyle.utils and freestyle.chainingiterators.
    from freestyle.chainingiterators import pyChainSilhouetteGenericIterator
    length = 0.0
    # setup iterator
    _it = pyChainSilhouetteGenericIterator(False, False)
    _it.begin = ve
    _it.current_edge = ve
    _it.orientation = orientation
    _it.init()
    # run iterator till end of chain
    while not (_it.is_end):
        length += _it.object.length_2d
        if (_it.is_begin):
            # _it has looped back to the beginning;
            # break to prevent infinite loop
            break
        _it.increment()
    # reset iterator
    _it.begin = ve
    _it.current_edge = ve
    _it.orientation = orientation
    # run iterator till begin of chain
    if not _it.is_begin:
        # decrement first so the edge at *ve* is not counted twice
        _it.decrement()
        while not (_it.is_end or _it.is_begin):
            length += _it.object.length_2d
            _it.decrement()
    return length
def find_matching_vertex(id, it):
    """Return the first vertex in *it* whose id equals *id*, or None."""
    for ve in it:
        if ve.id == id:
            return ve
    return None
| # -- helper functions for iterating -- # | |||||
def iter_current_previous(stroke):
    """Iterate over the stroke's vertices.

    Yields tuples of the form (it, prev, current), where *it* is the
    underlying Interface0DIterator (usable with 0D/1D functions), *prev*
    is the previously visited vertex and *current* the one at *it*.  On
    the first iteration prev == current == stroke[0].

    Fix: *prev* is now advanced each iteration; it used to stay stuck at
    the first vertex (defect noted in review), which made downstream
    per-segment distances wrong.
    """
    prev = stroke[0]
    it = Interface0DIterator(stroke)
    for current in it:
        yield (it, prev, current)
        prev = current
def iter_t2d_along_stroke(stroke):
    """Yield (it, t) for every stroke vertex.

    *t* is the distance walked so far along the stroke, normalized by
    the total stroke length and clamped to [0, 1]; 0.0 for a stroke of
    zero length.
    """
    total_length = stroke.length_2d
    walked = 0.0
    for it, prev, svert in iter_current_previous(stroke):
        walked += (prev.point - svert.point).length
        if total_length > 0.0:
            t = min(walked / total_length, 1.0)
        else:
            t = 0.0
        yield (it, t)
def iter_distance_from_camera(stroke, range_min, range_max):
    """Yield (it, t) for every stroke vertex.

    *t* is the vertex's distance to the camera mapped linearly onto
    [0, 1] between range_min and range_max (0.0 below the minimum,
    1.0 above the maximum).
    """
    normfac = range_max - range_min  # normalization factor
    it = Interface0DIterator(stroke)
    for svert in it:
        # point_3d is expressed in the camera coordinate system,
        # so its length is the distance from the camera.
        distance = svert.point_3d.length
        if distance < range_min:
            yield (it, 0.0)
        elif distance > range_max:
            yield (it, 1.0)
        else:
            yield (it, (distance - range_min) / normfac)
def iter_distance_from_object(stroke, object, range_min, range_max):
    """Yield (it, t) for every stroke vertex.

    *t* is the vertex's distance to the given object's location mapped
    linearly onto [0, 1] between range_min and range_max (0.0 below the
    minimum, 1.0 above the maximum).
    """
    scene = getCurrentScene()
    # Model-view matrix: brings the object's world-space location into
    # the camera coordinate system, where point_3d lives.
    mv = scene.camera.matrix_world.copy().inverted()
    loc = mv * object.location
    normfac = range_max - range_min  # normalization factor
    it = Interface0DIterator(stroke)
    for svert in it:
        distance = (svert.point_3d - loc).length  # in the camera coordinate
        if distance < range_min:
            yield (it, 0.0)
        elif distance > range_max:
            yield (it, 1.0)
        else:
            yield (it, (distance - range_min) / normfac)
def iter_material_color(stroke, material_attribute):
    """Yield (it, color) for every stroke vertex.

    *color* is the RGB part of the requested attribute ('DIFF' or
    'SPEC') of the material of the object behind the vertex.

    :raise ValueError: for any other material_attribute value.
    """
    material_func = CurveMaterialF0D()
    it = Interface0DIterator(stroke)
    for _ in it:
        material = material_func(it)
        if material_attribute == 'DIFF':
            yield (it, material.diffuse[0:3])
        elif material_attribute == 'SPEC':
            yield (it, material.specular[0:3])
        else:
            raise ValueError("unexpected material attribute: " + material_attribute)
def iter_material_value(stroke, material_attribute):
    """Yield (it, t) for every stroke vertex.

    *t* is a scalar extracted from the material of the object behind the
    vertex, selected by material_attribute (diffuse/specular channels or
    intensity, alpha, or hardness).

    :raise ValueError: for an unknown material_attribute value.
    """
    # Dispatch table: attribute name -> scalar extractor.
    extractors = {
        'DIFF': lambda m: rgb_to_bw(*m.diffuse[0:3]),
        'DIFF_R': lambda m: m.diffuse[0],
        'DIFF_G': lambda m: m.diffuse[1],
        'DIFF_B': lambda m: m.diffuse[2],
        'SPEC': lambda m: rgb_to_bw(*m.specular[0:3]),
        'SPEC_R': lambda m: m.specular[0],
        'SPEC_G': lambda m: m.specular[1],
        'SPEC_B': lambda m: m.specular[2],
        'SPEC_HARDNESS': lambda m: m.shininess,
        'ALPHA': lambda m: m.diffuse[3],
    }
    extract = extractors.get(material_attribute)
    func = CurveMaterialF0D()
    it = Interface0DIterator(stroke)
    for svert in it:
        material = func(it)
        # Raised inside the loop (like the original if/elif chain), so an
        # empty stroke never raises regardless of the attribute given.
        if extract is None:
            raise ValueError("unexpected material attribute: " + material_attribute)
        yield (it, extract(material))
def iter_distance_along_stroke(stroke):
    """Yield (it, distance) for every stroke vertex.

    *distance* is the cumulative 2D arc length walked from the first
    vertex up to the current one (0.0 at the first vertex).

    Fix: the previous position is initialized to the first vertex's
    *point* (a Vector); it used to be the StrokeVertex object itself,
    which cannot be subtracted from a point Vector.
    """
    distance = 0.0
    # copy: the point may be altered in place by modifiers downstream
    prev = stroke[0].point.copy()
    it = Interface0DIterator(stroke)
    for svert in it:
        p = svert.point
        distance += (prev - p).length
        prev = p.copy()
        yield it, distance
def iter_triplet(it):
    """Iterate over *it*, yielding (previous, current, next) triples.

    Yields nothing when the iterator has fewer than three elements.

    Fix: under PEP 479 (Python 3.7+) a bare next() on an exhausted
    iterator inside a generator raises RuntimeError; the StopIteration
    is now caught so short inputs simply produce no triples.
    """
    try:
        prev = next(it)
        current = next(it)
    except StopIteration:
        return
    for succ in it:
        yield prev, current, succ
        prev, current = current, succ
# -- mathematical operations -- #
def stroke_curvature(it):
    """
    Compute the 2D curvature at the stroke vertex pointed by the iterator 'it'.

    K = 1 / R, where R is the radius of the circle going through the
    current vertex and its two neighbors; the result is clamped to
    [0, 1].  Returns 0.0 at the stroke endpoints and when the three
    points are coincident (degenerate triangle).

    Fix: the local previously named ``next`` shadowed the builtin; it is
    renamed to succ_point.
    """
    if it.is_end or it.is_begin:
        return 0.0
    succ_point = it.incremented().point
    prev_point = it.decremented().point
    current = it.object.point
    ab = (current - prev_point)
    bc = (succ_point - current)
    ac = (prev_point - succ_point)
    a, b, c = ab.length, bc.length, ac.length
    try:
        # 2D cross product = twice the signed triangle area;
        # K = 4 * area / (a * b * c) is the Menger curvature.
        area = 0.5 * ab.cross(ac)
        K = (4 * area) / (a * b * c)
        K = bound(0.0, K, 1.0)
    except ZeroDivisionError:
        # coincident neighbor points: treat as a straight segment
        K = 0.0
    return K
def stroke_normal(it):
    """
    Compute the 2D normal at the stroke vertex pointed by the iterator 'it'.

    The normal of each adjacent segment is the segment direction rotated
    90 degrees ((e.y, -e.x)); at an interior vertex the two segment
    normals are averaged.  At the first/last vertex only the single
    adjacent segment is used.

    NOTE(review): the original docstring was truncated in this view
    ("Context not available"); per the surviving fragment, this exists
    because stroke points may already have been modified by stroke
    geometry modifiers, so normals must come from the stroke itself —
    confirm against the full file.
    """
    # first stroke segment
    it_next = it.incremented()
    if it.is_begin:
        e = it_next.object.point_2d - it.object.point_2d
        n = Vector((e[1], -e[0]))
        return n.normalized()
    # last stroke segment
    it_prev = it.decremented()
    if it_next.is_end:
        e = it.object.point_2d - it_prev.object.point_2d
        n = Vector((e[1], -e[0]))
        return n.normalized()
    # interior vertex: average the normals of the two adjacent segments
    e1 = it_next.object.point_2d - it.object.point_2d
    e2 = it.object.point_2d - it_prev.object.point_2d
    n1 = Vector((e1[1], -e1[0])).normalized()
    n2 = Vector((e2[1], -e2[0])).normalized()
    return (n1 + n2).normalized()
| Context not available. | |||||
I guess this helper function needs to be renamed to chromatize as referred to in the code below?