From 5fcbad653359a90c8acbbd99a1e13d61cf8559aa Mon Sep 17 00:00:00 2001 From: avanishsubbiah Date: Sat, 30 Jul 2022 11:00:03 -0400 Subject: [PATCH] Added python material-color-utilities library --- python/LICENSE | 202 +++++++++++++++++ python/blend/blend.py | 121 ++++++++++ python/hct/cam16.py | 252 +++++++++++++++++++++ python/hct/hct.py | 229 +++++++++++++++++++ python/hct/viewing_conditions.py | 114 ++++++++++ python/palettes/core_palette.py | 42 ++++ python/palettes/tonal_palette.py | 61 +++++ python/quantize/lab_point_provider.py | 51 +++++ python/quantize/quantizer_celebi.py | 50 +++++ python/quantize/quantizer_map.py | 45 ++++ python/quantize/quantizer_wsmeans.py | 182 +++++++++++++++ python/quantize/quantizer_wu.py | 308 ++++++++++++++++++++++++++ python/scheme/scheme.py | 213 ++++++++++++++++++ python/score/score.py | 128 +++++++++++ python/utils/color_utils.py | 269 ++++++++++++++++++++++ python/utils/image_utils.py | 81 +++++++ python/utils/math_utils.py | 105 +++++++++ python/utils/string_utils.py | 72 ++++++ python/utils/theme_utils.py | 116 ++++++++++ 19 files changed, 2641 insertions(+) create mode 100644 python/LICENSE create mode 100644 python/blend/blend.py create mode 100644 python/hct/cam16.py create mode 100644 python/hct/hct.py create mode 100644 python/hct/viewing_conditions.py create mode 100644 python/palettes/core_palette.py create mode 100644 python/palettes/tonal_palette.py create mode 100644 python/quantize/lab_point_provider.py create mode 100644 python/quantize/quantizer_celebi.py create mode 100644 python/quantize/quantizer_map.py create mode 100644 python/quantize/quantizer_wsmeans.py create mode 100644 python/quantize/quantizer_wu.py create mode 100644 python/scheme/scheme.py create mode 100644 python/score/score.py create mode 100644 python/utils/color_utils.py create mode 100644 python/utils/image_utils.py create mode 100644 python/utils/math_utils.py create mode 100644 python/utils/string_utils.py create mode 100644 
python/utils/theme_utils.py diff --git a/python/LICENSE b/python/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/python/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/python/blend/blend.py b/python/blend/blend.py new file mode 100644 index 0000000..20629fd --- /dev/null +++ b/python/blend/blend.py @@ -0,0 +1,121 @@ +# /** +# * @license +# * Copyright 2021 Google LLC +# * +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. +# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from hct.cam16 import *
from hct.hct import *
from utils.color_utils import *
from utils.math_utils import *


class Blend:
    """Functions for blending colors in HCT and CAM16-UCS.

    A class is used as a namespace so the library keeps a consistent,
    modular API across platform ports.
    """

    @staticmethod
    def harmonize(designColor, sourceColor):
        """Shift designColor's hue towards sourceColor's hue.

        The result stays recognizable as the design color while reading as
        a slightly warmer/cooler variant leaning towards the source color.

        Args:
            designColor: ARGB representation of an arbitrary color.
            sourceColor: ARGB representation of the main theme color.

        Returns:
            ARGB of the design color with its hue rotated towards the
            source color; chroma and tone are preserved.
        """
        design_hct = Hct.fromInt(designColor)
        source_hct = Hct.fromInt(sourceColor)
        # Rotate no more than half the hue gap, capped at 15 degrees.
        hue_gap = differenceDegrees(design_hct.hue, source_hct.hue)
        rotation = min(hue_gap * 0.5, 15.0)
        direction = Blend.rotationDirection(design_hct.hue, source_hct.hue)
        blended_hue = sanitizeDegreesDouble(design_hct.hue + rotation * direction)
        return Hct.fromHct(blended_hue, design_hct.chroma, design_hct.tone).toInt()

    # "from" is a reserved word in Python, hence the from_v parameter name.
    @staticmethod
    def hctHue(from_v, to, amount):
        """Blend only the hue of from_v towards to; chroma and tone are kept.

        Args:
            from_v: ARGB representation of a color.
            to: ARGB representation of a color.
            amount: blend fraction in [0.0, 1.0].

        Returns:
            ARGB of from_v with its hue blended towards to.
        """
        ucs_argb = Blend.cam16Ucs(from_v, to, amount)
        ucs_cam = Cam16.fromInt(ucs_argb)
        start_cam = Cam16.fromInt(from_v)
        blended = Hct.fromHct(ucs_cam.hue, start_cam.chroma, lstarFromArgb(from_v))
        return blended.toInt()

    # "from" is a reserved word in Python, hence the from_v parameter name.
    @staticmethod
    def cam16Ucs(from_v, to, amount):
        """Linearly blend two colors in CAM16-UCS space.

        Args:
            from_v: ARGB representation of a color.
            to: ARGB representation of a color.
            amount: blend fraction in [0.0, 1.0].

        Returns:
            ARGB of from_v blended towards to; hue, chroma, and tone may
            all change.
        """
        start = Cam16.fromInt(from_v)
        end = Cam16.fromInt(to)
        jstar = start.jstar + (end.jstar - start.jstar) * amount
        astar = start.astar + (end.astar - start.astar) * amount
        bstar = start.bstar + (end.bstar - start.bstar) * amount
        return Cam16.fromUcs(jstar, astar, bstar).toInt()

    # "from" is a reserved word in Python, hence the from_v parameter name.
    @staticmethod
    def rotationDirection(from_v, to):
        """Sign of the shortest angular travel from one hue angle to another.

        Args:
            from_v: start angle, in degrees.
            to: end angle, in degrees.

        Returns:
            1.0 if increasing from_v is the shortest way to reach to,
            -1.0 if decreasing it is.
        """
        # Candidate rotations: direct, and wrapping around either way.
        direct = to - from_v
        candidates = (direct, direct + 360.0, direct - 360.0)
        # min() keeps the first of tied candidates, matching the reference
        # implementation's a-then-b-then-c preference order.
        shortest = min(candidates, key=abs)
        return 1.0 if shortest >= 0.0 else -1.0
# CAM16, a color appearance model: a color is identified not by a hex code
# alone, but by a hex code plus the viewing conditions under which it was
# observed. For example, white under a midday-sun white point is measured
# by CAM16 as slightly chromatic blue (roughly hue 203, chroma 3,
# lightness 100).
class Cam16:
    """A color expressed in the CAM16 appearance model.

    Holds the CAM16 dimensions (hue, chroma, lightness j, brightness q,
    colorfulness m, saturation s) plus the CAM16-UCS coordinates
    (jstar, astar, bstar). CAM16-UCS is the space that should be used when
    measuring distances between colors.
    """

    def __init__(self, hue, chroma, j, q, m, s, jstar, astar, bstar):
        """Store all nine CAM16 / CAM16-UCS dimensions.

        All dimensions can be derived from three of them (e.g. {j or q} +
        {c, m, or s} + hue, or jstar/astar/bstar); prefer the static
        constructors, which perform that derivation and call this.
        """
        self.hue = hue
        self.chroma = chroma  # colorfulness / intensity; like HSL saturation, but perceptually accurate
        self.j = j            # lightness
        self.q = q            # brightness: ratio of lightness to white point's lightness
        self.m = m            # colorfulness
        self.s = s            # saturation: ratio of chroma to white point's chroma
        self.jstar = jstar    # CAM16-UCS J coordinate
        self.astar = astar    # CAM16-UCS a coordinate
        self.bstar = bstar    # CAM16-UCS b coordinate

    def distance(self, other):
        """Distance to another Cam16, measured in CAM16-UCS space."""
        dJ = self.jstar - other.jstar
        dA = self.astar - other.astar
        dB = self.bstar - other.bstar
        dEPrime = math.sqrt(dJ * dJ + dA * dA + dB * dB)
        # Scaling of delta-E-prime per the CAM16-UCS formulation.
        dE = 1.41 * pow(dEPrime, 0.63)
        return dE

    @staticmethod
    def fromInt(argb):
        """CAM16 of an ARGB color, assuming default viewing conditions."""
        return Cam16.fromIntInViewingConditions(argb, ViewingConditions.DEFAULT)

    @staticmethod
    def fromIntInViewingConditions(argb, viewingConditions):
        """CAM16 of an ARGB color as observed under viewingConditions."""
        # Extract 8-bit sRGB channels and linearize them.
        red = (argb & 0x00ff0000) >> 16
        green = (argb & 0x0000ff00) >> 8
        blue = (argb & 0x000000ff)
        redL = linearized(red)
        greenL = linearized(green)
        blueL = linearized(blue)
        # Linear sRGB -> XYZ.
        x = 0.41233895 * redL + 0.35762064 * greenL + 0.18051042 * blueL
        y = 0.2126 * redL + 0.7152 * greenL + 0.0722 * blueL
        z = 0.01932141 * redL + 0.11916382 * greenL + 0.95034478 * blueL
        # XYZ -> cone responses, then discounting-illuminant adaptation (rgbD).
        rC = 0.401288 * x + 0.650173 * y - 0.051461 * z
        gC = -0.250268 * x + 1.204414 * y + 0.045854 * z
        bC = -0.002079 * x + 0.048952 * y + 0.953127 * z
        rD = viewingConditions.rgbD[0] * rC
        gD = viewingConditions.rgbD[1] * gC
        bD = viewingConditions.rgbD[2] * bC
        # Post-adaptation nonlinear response compression.
        rAF = pow((viewingConditions.fl * abs(rD)) / 100.0, 0.42)
        gAF = pow((viewingConditions.fl * abs(gD)) / 100.0, 0.42)
        bAF = pow((viewingConditions.fl * abs(bD)) / 100.0, 0.42)
        rA = (signum(rD) * 400.0 * rAF) / (rAF + 27.13)
        gA = (signum(gD) * 400.0 * gAF) / (gAF + 27.13)
        bA = (signum(bD) * 400.0 * bAF) / (bAF + 27.13)
        # Opponent color dimensions (a red-green, b yellow-blue) and
        # auxiliary components.
        a = (11.0 * rA + -12.0 * gA + bA) / 11.0
        b = (rA + gA - 2.0 * bA) / 9.0
        u = (20.0 * rA + 20.0 * gA + 21.0 * bA) / 20.0
        p2 = (40.0 * rA + 20.0 * gA + bA) / 20.0
        # Hue angle, normalized into [0, 360).
        atan2 = math.atan2(b, a)
        atanDegrees = (atan2 * 180.0) / math.pi
        hue = atanDegrees + 360.0 if atanDegrees < 0 else atanDegrees - 360.0 if atanDegrees >= 360 else atanDegrees
        hueRadians = (hue * math.pi) / 180.0
        # Achromatic response -> lightness (j) and brightness (q).
        ac = p2 * viewingConditions.nbb
        j = 100.0 * pow(ac / viewingConditions.aw, viewingConditions.c * viewingConditions.z)
        q = (4.0 / viewingConditions.c) * math.sqrt(j / 100.0) * (viewingConditions.aw + 4.0) * viewingConditions.fLRoot
        # Eccentricity, then chroma (c), colorfulness (m), saturation (s).
        huePrime = hue + 360 if hue < 20.14 else hue
        eHue = 0.25 * (math.cos((huePrime * math.pi) / 180.0 + 2.0) + 3.8)
        p1 = (50000.0 / 13.0) * eHue * viewingConditions.nc * viewingConditions.ncb
        t = (p1 * math.sqrt(a * a + b * b)) / (u + 0.305)
        alpha = pow(t, 0.9) * pow(1.64 - pow(0.29, viewingConditions.n), 0.73)
        c = alpha * math.sqrt(j / 100.0)
        m = c * viewingConditions.fLRoot
        s = 50.0 * math.sqrt((alpha * viewingConditions.c) / (viewingConditions.aw + 4.0))
        # CAM16-UCS coordinates (compressed lightness and colorfulness).
        jstar = ((1.0 + 100.0 * 0.007) * j) / (1.0 + 0.007 * j)
        mstar = (1.0 / 0.0228) * math.log(1.0 + 0.0228 * m)
        astar = mstar * math.cos(hueRadians)
        bstar = mstar * math.sin(hueRadians)
        return Cam16(hue, c, j, q, m, s, jstar, astar, bstar)

    @staticmethod
    def fromJch(j, c, h):
        """CAM16 from lightness j, chroma c, hue h in default viewing conditions."""
        return Cam16.fromJchInViewingConditions(j, c, h, ViewingConditions.DEFAULT)

    @staticmethod
    def fromJchInViewingConditions(j, c, h, viewingConditions):
        """CAM16 from lightness j, chroma c, hue h under viewingConditions."""
        q = (4.0 / viewingConditions.c) * math.sqrt(j / 100.0) * (viewingConditions.aw + 4.0) * viewingConditions.fLRoot
        m = c * viewingConditions.fLRoot
        alpha = c / math.sqrt(j / 100.0)
        s = 50.0 * math.sqrt((alpha * viewingConditions.c) / (viewingConditions.aw + 4.0))
        hueRadians = (h * math.pi) / 180.0
        jstar = ((1.0 + 100.0 * 0.007) * j) / (1.0 + 0.007 * j)
        mstar = (1.0 / 0.0228) * math.log(1.0 + 0.0228 * m)
        astar = mstar * math.cos(hueRadians)
        bstar = mstar * math.sin(hueRadians)
        return Cam16(h, c, j, q, m, s, jstar, astar, bstar)

    @staticmethod
    def fromUcs(jstar, astar, bstar):
        """CAM16 from CAM16-UCS coordinates in default viewing conditions.

        jstar is UCS lightness; astar/bstar are the Cartesian UCS axes
        (analogous to a*/b* in L*a*b*).
        """
        return Cam16.fromUcsInViewingConditions(jstar, astar, bstar, ViewingConditions.DEFAULT)

    @staticmethod
    def fromUcsInViewingConditions(jstar, astar, bstar, viewingConditions):
        """CAM16 from CAM16-UCS coordinates under viewingConditions."""
        a = astar
        b = bstar
        # Invert the UCS colorfulness compression, recover chroma and hue.
        m = math.sqrt(a * a + b * b)
        M = (math.exp(m * 0.0228) - 1.0) / 0.0228
        c = M / viewingConditions.fLRoot
        h = math.atan2(b, a) * (180.0 / math.pi)
        if (h < 0.0):
            h += 360.0
        # Invert the UCS lightness compression.
        j = jstar / (1 - (jstar - 100) * 0.007)
        return Cam16.fromJchInViewingConditions(j, c, h, viewingConditions)

    def toInt(self):
        """ARGB of this color, assuming default (near-sRGB) viewing conditions."""
        return self.viewed(ViewingConditions.DEFAULT)

    def viewed(self, viewingConditions):
        """ARGB of this color as it would appear under viewingConditions.

        This is the inverse of fromIntInViewingConditions.
        """
        alpha = 0.0 if self.chroma == 0.0 or self.j == 0.0 else self.chroma / math.sqrt(self.j / 100.0)
        t = pow(alpha / pow(1.64 - pow(0.29, viewingConditions.n), 0.73), 1.0 / 0.9)
        hRad = (self.hue * math.pi) / 180.0
        eHue = 0.25 * (math.cos(hRad + 2.0) + 3.8)
        ac = viewingConditions.aw * pow(self.j / 100.0, 1.0 / viewingConditions.c / viewingConditions.z)
        p1 = eHue * (50000.0 / 13.0) * viewingConditions.nc * viewingConditions.ncb
        p2 = ac / viewingConditions.nbb
        hSin = math.sin(hRad)
        hCos = math.cos(hRad)
        gamma = (23.0 * (p2 + 0.305) * t) / (23.0 * p1 + 11.0 * t * hCos + 108.0 * t * hSin)
        a = gamma * hCos
        b = gamma * hSin
        # Opponent dimensions back to adapted cone responses.
        rA = (460.0 * p2 + 451.0 * a + 288.0 * b) / 1403.0
        gA = (460.0 * p2 - 891.0 * a - 261.0 * b) / 1403.0
        bA = (460.0 * p2 - 220.0 * a - 6300.0 * b) / 1403.0
        # Invert the response compression; max(0, ...) clamps negative bases.
        rCBase = max(0, (27.13 * abs(rA)) / (400.0 - abs(rA)))
        rC = signum(rA) * (100.0 / viewingConditions.fl) * pow(rCBase, 1.0 / 0.42)
        gCBase = max(0, (27.13 * abs(gA)) / (400.0 - abs(gA)))
        gC = signum(gA) * (100.0 / viewingConditions.fl) * pow(gCBase, 1.0 / 0.42)
        bCBase = max(0, (27.13 * abs(bA)) / (400.0 - abs(bA)))
        bC = signum(bA) * (100.0 / viewingConditions.fl) * pow(bCBase, 1.0 / 0.42)
        # Undo chromatic adaptation, then cone responses -> XYZ -> ARGB.
        rF = rC / viewingConditions.rgbD[0]
        gF = gC / viewingConditions.rgbD[1]
        bF = bC / viewingConditions.rgbD[2]
        x = 1.86206786 * rF - 1.01125463 * gF + 0.14918677 * bF
        y = 0.38752654 * rF + 0.62144744 * gF - 0.00897398 * bF
        z = -0.01584150 * rF - 0.03412294 * gF + 1.04996444 * bF
        argb = argbFromXyz(x, y, z)
        return argb
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# HCT: a color system built on CAM16 hue and chroma plus L* from L*a*b*.
# L* links the system to contrast and accessibility: unlike contrast ratio,
# differences in L* are linear to human perception, so a tone difference of
# 40 guarantees a contrast ratio >= 3.0 and a difference of 50 guarantees
# >= 4.5.

from utils.color_utils import *
from utils.math_utils import *
from hct.cam16 import *
from hct.viewing_conditions import *

# Binary search over chroma terminates once the floor/ceiling gap is below
# this value.
CHROMA_SEARCH_ENDPOINT = 0.4

# Maximum allowed CAM16-UCS distance between the requested color and the
# color returned.
DE_MAX = 1.0

# Maximum allowed difference between the requested L* and the L* returned.
DL_MAX = 0.2

# Binary search over J (CAM16 lightness) terminates once the floor/ceiling
# gap is below this value.
LIGHTNESS_SEARCH_ENDPOINT = 0.01


def findCamByJ(hue, chroma, tone):
    """Binary-search CAM16 lightness J for a color with the given dimensions.

    Args:
        hue: CAM16 hue.
        chroma: CAM16 chroma.
        tone: L*a*b* lightness.

    Returns:
        A Cam16 within error tolerance (DE_MAX, DL_MAX) of the requested
        dimensions, or None if none is found.
    """
    lo = 0.0
    hi = 100.0
    best_dl = 1000.0
    best_de = 1000.0
    best_cam = None
    while abs(lo - hi) > LIGHTNESS_SEARCH_ENDPOINT:
        mid_j = lo + (hi - lo) / 2
        # Convert the candidate to ARGB and back: this clips it into gamut.
        candidate = Cam16.fromJch(mid_j, chroma, hue)
        clipped_argb = candidate.toInt()
        clipped_lstar = lstarFromArgb(clipped_argb)
        dl = abs(tone - clipped_lstar)
        if dl < DL_MAX:
            clipped_cam = Cam16.fromInt(clipped_argb)
            de = clipped_cam.distance(
                Cam16.fromJch(clipped_cam.j, clipped_cam.chroma, hue))
            if de <= DE_MAX and de <= best_de:
                best_dl = dl
                best_de = de
                best_cam = clipped_cam
        # An exact match cannot be improved upon; stop early.
        if best_dl == 0 and best_de == 0:
            break
        if clipped_lstar < tone:
            lo = mid_j
        else:
            hi = mid_j
    return best_cam
def getIntInViewingConditions(hue, chroma, tone, viewingConditions):
    """Return an ARGB color with the given dimensions, as perceived in
    `viewingConditions`.

    Chroma has a hue- and tone-dependent maximum, so the requested chroma is
    binary-searched downward until an in-gamut color is found; falls back to
    a grey of the requested tone when no chromatic answer exists.
    """
    # Very low chroma, or extreme tones, are effectively grey.
    if chroma < 1.0 or round(tone) <= 0.0 or round(tone) >= 100.0:
        return argbFromLstar(tone)

    hue = sanitizeDegreesDouble(hue)
    high = chroma
    mid = chroma
    low = 0.0
    isFirstLoop = True
    answer = None
    # CHROMA_SEARCH_ENDPOINT is the bisection tolerance, defined earlier in
    # this module.
    while abs(low - high) >= CHROMA_SEARCH_ENDPOINT:
        possibleAnswer = findCamByJ(hue, mid, tone)
        if isFirstLoop:
            if possibleAnswer is not None:
                # The full requested chroma is achievable as-is.
                return possibleAnswer.viewed(viewingConditions)
            # Out of gamut at the requested chroma; start bisecting.
            isFirstLoop = False
            mid = low + (high - low) / 2.0
            continue
        if possibleAnswer is None:
            high = mid
        else:
            answer = possibleAnswer
            low = mid
        mid = low + (high - low) / 2.0
    if answer is None:
        return argbFromLstar(tone)
    return answer.viewed(viewingConditions)

def getInt(hue, chroma, tone):
    """ARGB representation of a color in default viewing conditions.

    :param hue: degrees, 0 <= hue < 360; invalid values are corrected.
    :param chroma: informally, colorfulness; may be lowered to fit gamut.
    :param tone: lightness, clamped to 0..100.
    """
    return getIntInViewingConditions(sanitizeDegreesDouble(hue), chroma, clampDouble(0.0, 100.0, tone), ViewingConditions.DEFAULT)

class Hct:
    """HCT: hue, chroma, and tone. A perceptually accurate color system that
    can also accurately render how colors appear in different lighting
    environments.
    """

    def __init__(self, internalHue, internalChroma, internalTone):
        self.internalHue = internalHue
        self.internalChroma = internalChroma
        self.internalTone = internalTone
        # Round-trip through ARGB so the stored dimensions reflect the
        # actually-achievable (in-gamut) color.
        self.setInternalState(self.toInt())

    # Renamed from "from" to "fromHct"; "from" is a reserved word in Python.
    @staticmethod
    def fromHct(hue, chroma, tone):
        """Construct from hue (corrected to [0, 360)), chroma (may be lowered
        to fit gamut), and tone (corrected to [0, 100])."""
        return Hct(hue, chroma, tone)

    @staticmethod
    def fromInt(argb):
        """HCT representation of an ARGB color in default viewing conditions."""
        cam = Cam16.fromInt(argb)
        tone = lstarFromArgb(argb)
        return Hct(cam.hue, cam.chroma, tone)

    def toInt(self):
        """ARGB representation of this color."""
        return getInt(self.internalHue, self.internalChroma, self.internalTone)

    def get_hue(self):
        """Hue in degrees, 0 <= hue < 360."""
        return self.internalHue

    def set_hue(self, newHue):
        """Set hue (invalid values corrected). Chroma may decrease because
        chroma has a different maximum for any given hue and tone."""
        self.setInternalState(getInt(sanitizeDegreesDouble(newHue), self.internalChroma, self.internalTone))

    def get_chroma(self):
        """Informally, colorfulness; 0 <= chroma."""
        return self.internalChroma

    def set_chroma(self, newChroma):
        """Set chroma; the stored value may be lower than requested."""
        self.setInternalState(getInt(self.internalHue, newChroma, self.internalTone))

    def get_tone(self):
        """Lightness; ranges from 0 to 100."""
        return self.internalTone

    def set_tone(self, newTone):
        """Set tone (invalid values corrected). Chroma may decrease."""
        self.setInternalState(getInt(self.internalHue, self.internalChroma, newTone))

    def setInternalState(self, argb):
        """Re-derive hue/chroma/tone from an ARGB value."""
        cam = Cam16.fromInt(argb)
        tone = lstarFromArgb(argb)
        self.internalHue = cam.hue
        self.internalChroma = cam.chroma
        self.internalTone = tone

    # Properties wrapping the getters and setters, mirroring the TS API.
    hue = property(get_hue, set_hue)
    chroma = property(get_chroma, set_chroma)
    tone = property(get_tone, set_tone)
class ViewingConditions:
    """In traditional color spaces, a color can be identified solely by the
    observer's measurement of the color. Color appearance models such as
    CAM16 also use information about the environment where the color was
    observed, known as the viewing conditions.

    For example, white under the traditional assumption of a midday-sun white
    point is accurately measured as a slightly chromatic blue by CAM16
    (roughly hue 203, chroma 3, lightness 100).

    This class caches intermediate values of the CAM16 conversion process
    that depend only on viewing conditions, enabling speed-ups.
    """

    def __init__(self, n, aw, nbb, ncb, c, nc, rgbD, fl, fLRoot, z):
        # Parameters are intermediate values of the CAM16 conversion process.
        # Their names are shorthand for technical color-science terminology;
        # a brief overview is available in the CAM16 specification, and a
        # complete one requires a color-science textbook such as Fairchild's
        # "Color Appearance Models".
        self.n = n
        self.aw = aw
        self.nbb = nbb
        self.ncb = ncb
        self.c = c
        self.nc = nc
        self.rgbD = rgbD
        self.fl = fl
        self.fLRoot = fLRoot
        self.z = z

    @staticmethod
    def make(whitePoint = whitePointD65(), adaptingLuminance = (200.0 / math.pi) * yFromLstar(50.0) / 100.0, backgroundLstar = 50.0, surround = 2.0, discountingIlluminant = False):
        """Create ViewingConditions from a simple, physically relevant set of
        parameters.

        :param whitePoint: white point, in XYZ. default = D65 (sunny day
            afternoon). NOTE: defaults are evaluated once at import time.
        :param adaptingLuminance: luminance of the adapting field; informally,
            how bright the room is (lux * 0.0586). default = 11.72 (200 lux).
        :param backgroundLstar: lightness (L* in L*a*b*) of the area
            surrounding the color. default = 50.0.
        :param surround: 0 = pitch dark (movie theater), 1.0 = dimly lit room,
            2.0 = no difference between the lighting on the color and around
            it. default = 2.0.
        :param discountingIlluminant: whether the eye accounts for the tint
            of ambient lighting (e.g. an apple still looks red in green
            light). default = False; the eye does not perform this process on
            self-luminous objects like displays.
        """
        xyz = whitePoint
        # White point transformed into the CAM16 RGB space.
        rW = xyz[0] * 0.401288 + xyz[1] * 0.650173 + xyz[2] * -0.051461
        gW = xyz[0] * -0.250268 + xyz[1] * 1.204414 + xyz[2] * 0.045854
        bW = xyz[0] * -0.002079 + xyz[1] * 0.048952 + xyz[2] * 0.953127
        f = 0.8 + surround / 10.0
        # Exponential non-linearity factor, interpolated by surround.
        c = lerp(0.59, 0.69, (f - 0.9) * 10.0) if f >= 0.9 else lerp(0.525, 0.59, (f - 0.8) * 10.0)
        # Degree of adaptation: 1.0 when the illuminant is fully discounted.
        d = 1.0 if discountingIlluminant else f * (1.0 - (1.0 / 3.6) * math.exp((-adaptingLuminance - 42.0) / 92.0))
        # Clamp d to [0, 1].
        d = 1.0 if d > 1.0 else 0.0 if d < 0.0 else d
        nc = f
        rgbD = [
            d * (100.0 / rW) + 1.0 - d,
            d * (100.0 / gW) + 1.0 - d,
            d * (100.0 / bW) + 1.0 - d,
        ]
        k = 1.0 / (5.0 * adaptingLuminance + 1.0)
        k4 = k * k * k * k
        k4F = 1.0 - k4
        # Luminance-level adaptation factor (fl); the last term is a cube root.
        fl = k4 * adaptingLuminance + 0.1 * k4F * k4F * ((5.0 * adaptingLuminance)**(1. / 3))
        n = yFromLstar(backgroundLstar) / whitePoint[1]
        z = 1.48 + math.sqrt(n)
        nbb = 0.725 / pow(n, 0.2)
        ncb = nbb
        rgbAFactors = [
            pow((fl * rgbD[0] * rW) / 100.0, 0.42),
            pow((fl * rgbD[1] * gW) / 100.0, 0.42),
            pow((fl * rgbD[2] * bW) / 100.0, 0.42),
        ]
        rgbA = [
            (400.0 * rgbAFactors[0]) / (rgbAFactors[0] + 27.13),
            (400.0 * rgbAFactors[1]) / (rgbAFactors[1] + 27.13),
            (400.0 * rgbAFactors[2]) / (rgbAFactors[2] + 27.13),
        ]
        # Achromatic response to the white point.
        aw = (2.0 * rgbA[0] + rgbA[1] + 0.05 * rgbA[2]) * nbb
        return ViewingConditions(n, aw, nbb, ncb, c, nc, rgbD, fl, pow(fl, 0.25), z)

# sRGB-like viewing conditions.
ViewingConditions.DEFAULT = ViewingConditions.make()
from collections import OrderedDict

class CorePalette:
    """An intermediate concept between the key color for a UI theme and a
    full color scheme. Five sets of tones are generated; all but one use the
    same hue as the key color, and all vary in chroma.
    """

    def __init__(self, argb):
        hct = Hct.fromInt(argb)
        hue = hct.hue
        # Primary keeps the source chroma but never drops below 48.
        self.a1 = TonalPalette.fromHueAndChroma(hue, max(48, hct.chroma))
        self.a2 = TonalPalette.fromHueAndChroma(hue, 16)
        # Tertiary is hue-shifted by 60 degrees.
        self.a3 = TonalPalette.fromHueAndChroma(hue + 60, 24)
        self.n1 = TonalPalette.fromHueAndChroma(hue, 4)
        self.n2 = TonalPalette.fromHueAndChroma(hue, 8)
        # Error palette uses a fixed red (hue 25, chroma 84).
        self.error = TonalPalette.fromHueAndChroma(25, 84)

    @staticmethod
    def of(argb):
        """Create a CorePalette from an ARGB representation of a color."""
        return CorePalette(argb)

class TonalPalette:
    """A convenience class for retrieving colors that are constant in hue
    and chroma but vary in tone.
    """

    # Using OrderedDict() as a replacement for the TS Map().
    def __init__(self, hue, chroma):
        self.hue = hue
        self.chroma = chroma
        # tone (int) -> ARGB, memoizing the HCT -> ARGB conversion.
        self.cache = OrderedDict()

    @staticmethod
    def fromInt(argb):
        """Tones matching the given ARGB color's hue and chroma."""
        hct = Hct.fromInt(argb)
        return TonalPalette.fromHueAndChroma(hct.hue, hct.chroma)

    @staticmethod
    def fromHueAndChroma(hue, chroma):
        """Tones matching the given HCT hue and chroma."""
        return TonalPalette(hue, chroma)

    def tone(self, tone):
        """ARGB representation of a color with the given HCT tone (0..100),
        computed once per tone and cached thereafter."""
        if tone not in self.cache:
            self.cache[tone] = Hct.fromHct(self.hue, self.chroma, tone).toInt()
        return self.cache[tone]
+ # */ + def fromInt(self, argb): + return labFromArgb(argb) + + # /** + # * Convert a 3-element array to a color represented in ARGB. + # */ + def toInt(self, point): + return argbFromLab(point[0], point[1], point[2]) + + # /** + # * Standard CIE 1976 delta E formula also takes the square root, unneeded + # * here. This method is used by quantization algorithms to compare distance, + # * and the relative ordering is the same, with or without a square root. + # * + # * This relatively minor optimization is helpful because this method is + # * called at least once for each pixel in an image. + # */ + # Renamed "from" to "from_v", from is reserved in Python + def distance(self, from_v, to): + dL = from_v[0] - to[0] + dA = from_v[1] - to[1] + dB = from_v[2] - to[2] + return dL * dL + dA * dA + dB * dB diff --git a/python/quantize/quantizer_celebi.py b/python/quantize/quantizer_celebi.py new file mode 100644 index 0000000..b58d6ef --- /dev/null +++ b/python/quantize/quantizer_celebi.py @@ -0,0 +1,50 @@ +# /** +# * @license +# * Copyright 2021 Google LLC +# * +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. +# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +from quantize.quantizer_wsmeans import * +from quantize.quantizer_wu import * + +# /** +# * An image quantizer that improves on the quality of a standard K-Means +# * algorithm by setting the K-Means initial state to the output of a Wu +# * quantizer, instead of random centroids. 
Improves on speed by several +# * optimizations, as implemented in Wsmeans, or Weighted Square Means, K-Means +# * with those optimizations. +# * +# * This algorithm was designed by M. Emre Celebi, and was found in their 2011 +# * paper, Improving the Performance of K-Means for Color Quantization. +# * https://arxiv.org/abs/1101.0395 +# */ +# // libmonet is designed to have a consistent API across platforms +# // and modular components that can be moved around easily. Using a class as a +# // namespace facilitates this. +# // +# // tslint:disable-next-line:class-as-namespace +class QuantizerCelebi: + # /** + # * @param pixels Colors in ARGB format. + # * @param maxColors The number of colors to divide the image into. A lower + # * number of colors may be returned. + # * @return Map with keys of colors in ARGB format, and values of number of + # * pixels in the original image that correspond to the color in the + # * quantized image. + # */ + @staticmethod + def quantize(pixels, maxColors): + wu = QuantizerWu() + wuResult = wu.quantize(pixels, maxColors) + return QuantizerWsmeans.quantize(pixels, wuResult, maxColors) diff --git a/python/quantize/quantizer_map.py b/python/quantize/quantizer_map.py new file mode 100644 index 0000000..33c1261 --- /dev/null +++ b/python/quantize/quantizer_map.py @@ -0,0 +1,45 @@ +# /** +# * @license +# * Copyright 2021 Google LLC +# * +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. +# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
from collections import OrderedDict

# libmonet is designed to have a consistent API across platforms and modular
# components that can be moved around easily; a class used as a namespace
# facilitates this.
class QuantizerMap:
    """Quantizes an image into a map with keys of ARGB colors and values of
    the number of times each color appears in the image.
    """

    @staticmethod
    def quantize(pixels):
        """Count fully-opaque pixels by color.

        :param pixels: colors in ARGB format.
        :return: OrderedDict of ARGB color -> occurrence count, in first-seen
            order.
        """
        countByColor = OrderedDict()
        for pixel in pixels:
            # Translucent and transparent pixels are excluded entirely.
            if alphaFromArgb(pixel) < 255:
                continue
            countByColor[pixel] = countByColor.get(pixel, 0) + 1
        return countByColor
+# */ + +from quantize.lab_point_provider import * +from collections import OrderedDict +import random +import math + +MAX_ITERATIONS = 10 +MIN_MOVEMENT_DISTANCE = 3.0 + +# /** +# * An image quantizer that improves on the speed of a standard K-Means algorithm +# * by implementing several optimizations, including deduping identical pixels +# * and a triangle inequality rule that reduces the number of comparisons needed +# * to identify which cluster a point should be moved to. +# * +# * Wsmeans stands for Weighted Square Means. +# * +# * This algorithm was designed by M. Emre Celebi, and was found in their 2011 +# * paper, Improving the Performance of K-Means for Color Quantization. +# * https://arxiv.org/abs/1101.0395 +# */ +# // libmonet is designed to have a consistent API across platforms +# // and modular components that can be moved around easily. Using a class as a +# // namespace facilitates this. +# // +# // tslint:disable-next-line:class-as-namespace +class QuantizerWsmeans: + # /** + # * @param inputPixels Colors in ARGB format. + # * @param startingClusters Defines the initial state of the quantizer. Passing + # * an empty array is fine, the implementation will create its own initial + # * state that leads to reproducible results for the same inputs. + # * Passing an array that is the result of Wu quantization leads to higher + # * quality results. + # * @param maxColors The number of colors to divide the image into. A lower + # * number of colors may be returned. + # * @return Colors in ARGB format. 
+ # */ + # Replacing Map() with OrderedDict() + @staticmethod + def quantize(inputPixels, startingClusters, maxColors): + random.seed(69) + pixelToCount = OrderedDict() + points = [] + pixels = [] + pointProvider = LabPointProvider() + pointCount = 0 + for i in range(len(inputPixels)): + inputPixel = inputPixels[i] + if (inputPixel not in pixelToCount.keys()): + pointCount += 1 + points.append(pointProvider.fromInt(inputPixel)) + pixels.append(inputPixel) + pixelToCount[inputPixel] = 1 + else: + pixelToCount[inputPixel] = pixelToCount[inputPixel] + 1 + counts = [] + for i in range(pointCount): + pixel = pixels[i] + if (pixel in pixelToCount.keys()): + # counts[i] = pixelToCount[pixel] + counts.append(pixelToCount[pixel]) + clusterCount = min(maxColors, pointCount) + if (len(startingClusters) > 0): + clusterCount = min(clusterCount, len(startingClusters)) + clusters = [] + for i in range(len(startingClusters)): + clusters.append(pointProvider.fromInt(startingClusters[i])) + additionalClustersNeeded = clusterCount - len(clusters) + if (len(startingClusters) == 0 and additionalClustersNeeded > 0): + for i in range(additionalClustersNeeded): + l = random.uniform(0, 1) * 100.0 + a = random.uniform(0, 1) * (100.0 - (-100.0) + 1) + -100 + b = random.uniform(0, 1) * (100.0 - (-100.0) + 1) + -100 + clusters.append([l, a, b]) + clusterIndices = [] + for i in range(pointCount): + clusterIndices.append(math.floor(random.uniform(0, 1) * clusterCount)) + indexMatrix = [] + for i in range(clusterCount): + indexMatrix.append([]) + for j in range(clusterCount): + indexMatrix[i].append(0) + distanceToIndexMatrix = [] + for i in range(clusterCount): + distanceToIndexMatrix.append([]) + for j in range(clusterCount): + distanceToIndexMatrix[i].append(DistanceAndIndex()) + pixelCountSums = [] + for i in range(clusterCount): + pixelCountSums.append(0) + for iteration in range(MAX_ITERATIONS): + for i in range(clusterCount): + for j in range(i + 1, clusterCount): + distance = 
pointProvider.distance(clusters[i], clusters[j]) + distanceToIndexMatrix[j][i].distance = distance + distanceToIndexMatrix[j][i].index = i + distanceToIndexMatrix[i][j].distance = distance + distanceToIndexMatrix[i][j].index = j + # This sort here doesn't seem to do anything because arr of objects + # leaving just in case though + # distanceToIndexMatrix[i].sort() + for j in range(clusterCount): + indexMatrix[i][j] = distanceToIndexMatrix[i][j].index + pointsMoved = 0 + for i in range(pointCount): + point = points[i] + previousClusterIndex = clusterIndices[i] + previousCluster = clusters[previousClusterIndex] + previousDistance = pointProvider.distance(point, previousCluster) + minimumDistance = previousDistance + newClusterIndex = -1 + for j in range(clusterCount): + if (distanceToIndexMatrix[previousClusterIndex][j].distance >= 4 * previousDistance): + continue + distance = pointProvider.distance(point, clusters[j]) + if (distance < minimumDistance): + minimumDistance = distance + newClusterIndex = j + if (newClusterIndex != -1): + distanceChange = abs((math.sqrt(minimumDistance) - math.sqrt(previousDistance))) + if (distanceChange > MIN_MOVEMENT_DISTANCE): + pointsMoved += 1 + clusterIndices[i] = newClusterIndex + if (pointsMoved == 0 and iteration != 0): + break + componentASums = [0] * clusterCount + componentBSums = [0] * clusterCount + componentCSums = [0] * clusterCount + for i in range(clusterCount): + pixelCountSums[i] = 0 + for i in range(pointCount): + clusterIndex = clusterIndices[i] + point = points[i] + count = counts[i] + pixelCountSums[clusterIndex] += count + componentASums[clusterIndex] += (point[0] * count) + componentBSums[clusterIndex] += (point[1] * count) + componentCSums[clusterIndex] += (point[2] * count) + for i in range(clusterCount): + count = pixelCountSums[i] + if (count == 0): + clusters[i] = [0.0, 0.0, 0.0] + continue + a = componentASums[i] / count + b = componentBSums[i] / count + c = componentCSums[i] / count + clusters[i] = [a, 
b, c] + argbToPopulation = OrderedDict() + for i in range(clusterCount): + count = pixelCountSums[i] + if (count == 0): + continue + possibleNewCluster = pointProvider.toInt(clusters[i]) + if (possibleNewCluster in argbToPopulation.keys()): + continue + argbToPopulation[possibleNewCluster] = count + return argbToPopulation + +# /** +# * A wrapper for maintaining a table of distances between K-Means clusters. +# */ +class DistanceAndIndex: + def __init__(self): + self.distance = -1 + self.index = -1 diff --git a/python/quantize/quantizer_wu.py b/python/quantize/quantizer_wu.py new file mode 100644 index 0000000..29e79c0 --- /dev/null +++ b/python/quantize/quantizer_wu.py @@ -0,0 +1,308 @@ +# /** +# * @license +# * Copyright 2021 Google LLC +# * +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. +# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +from utils.color_utils import * +from quantize.quantizer_map import * + +INDEX_BITS = 5 +SIDE_LENGTH = 33 # ((1 << INDEX_INDEX_BITS) + 1) +TOTAL_SIZE = 35937 # SIDE_LENGTH * SIDE_LENGTH * SIDE_LENGTH +directions = { + "RED" : 'red', + "GREEN" : 'green', + "BLUE": 'blue', +} + +# /** +# * An image quantizer that divides the image's pixels into clusters by +# * recursively cutting an RGB cube, based on the weight of pixels in each area +# * of the cube. +# * +# * The algorithm was described by Xiaolin Wu in Graphic Gems II, published in +# * 1991. 
+# */ +class QuantizerWu: + def __init__(self, weights = [], momentsR = [], momentsG = [], momentsB = [], moments = [], cubes = []): + self.weights = weights + self.momentsR = momentsR + self.momentsG = momentsG + self.momentsB = momentsB + self.moments = moments + self.cubes = cubes + + # /** + # * @param pixels Colors in ARGB format. + # * @param maxColors The number of colors to divide the image into. A lower + # * number of colors may be returned. + # * @return Colors in ARGB format. + # */ + def quantize(self, pixels, maxColors): + self.constructHistogram(pixels) + self.computeMoments() + createBoxesResult = self.createBoxes(maxColors) + results = self.createResult(createBoxesResult.resultCount) + return results + + def constructHistogram(self, pixels): + _a = None + self.weights = [0] * TOTAL_SIZE + self.momentsR = [0] * TOTAL_SIZE + self.momentsG = [0] * TOTAL_SIZE + self.momentsB = [0] * TOTAL_SIZE + self.moments = [0] * TOTAL_SIZE + countByColor = QuantizerMap.quantize(pixels) + for (pixel, count) in countByColor.items(): + red = redFromArgb(pixel) + green = greenFromArgb(pixel) + blue = blueFromArgb(pixel) + bitsToRemove = 8 - INDEX_BITS + iR = (red >> bitsToRemove) + 1 + iG = (green >> bitsToRemove) + 1 + iB = (blue >> bitsToRemove) + 1 + index = self.getIndex(iR, iG, iB) + self.weights[index] = (self.weights[index] if len(self.weights) > index else 0) + count + self.momentsR[index] += count * red + self.momentsG[index] += count * green + self.momentsB[index] += count * blue + self.moments[index] += count * (red * red + green * green + blue * blue) + + def computeMoments(self): + for r in range(1, SIDE_LENGTH): + area = [0] * SIDE_LENGTH + areaR = [0] * SIDE_LENGTH + areaG = [0] * SIDE_LENGTH + areaB = [0] * SIDE_LENGTH + area2 = [0.0] * SIDE_LENGTH + for g in range(1, SIDE_LENGTH): + line = 0 + lineR = 0 + lineG = 0 + lineB = 0 + line2 = 0.0 + for b in range(1, SIDE_LENGTH): + index = self.getIndex(r, g, b) + line += self.weights[index] + lineR += 
self.momentsR[index] + lineG += self.momentsG[index] + lineB += self.momentsB[index] + line2 += self.moments[index] + area[b] += line + areaR[b] += lineR + areaG[b] += lineG + areaB[b] += lineB + area2[b] += line2 + previousIndex = self.getIndex(r - 1, g, b) + self.weights[index] = self.weights[previousIndex] + area[b] + self.momentsR[index] = self.momentsR[previousIndex] + areaR[b] + self.momentsG[index] = self.momentsG[previousIndex] + areaG[b] + self.momentsB[index] = self.momentsB[previousIndex] + areaB[b] + self.moments[index] = self.moments[previousIndex] + area2[b] + + def createBoxes(self, maxColors): + self.cubes = [Box() for x in [0] * maxColors] + volumeVariance = [0.0] * maxColors + self.cubes[0].r0 = 0 + self.cubes[0].g0 = 0 + self.cubes[0].b0 = 0 + self.cubes[0].r1 = SIDE_LENGTH - 1 + self.cubes[0].g1 = SIDE_LENGTH - 1 + self.cubes[0].b1 = SIDE_LENGTH - 1 + generatedColorCount = maxColors + next = 0 + for i in range(1, maxColors): + if (self.cut(self.cubes[next], self.cubes[i])): + volumeVariance[next] = self.variance(self.cubes[next]) if self.cubes[next].vol > 1 else 0.0 + volumeVariance[i] = self.variance(self.cubes[i]) if self.cubes[i].vol > 1 else 0.0 + else: + volumeVariance[next] = 0.0 + i -= 1 + next = 0 + temp = volumeVariance[0] + for j in range(1, i): + if (volumeVariance[j] > temp): + temp = volumeVariance[j] + next = j + if (temp <= 0.0): + generatedColorCount = i + 1 + break + return CreateBoxesResult(maxColors, generatedColorCount) + + def createResult(self, colorCount): + colors = [] + for i in range(colorCount): + cube = self.cubes[i] + weight = self.volume(cube, self.weights) + if (weight > 0): + r = round(self.volume(cube, self.momentsR) / weight) + g = round(self.volume(cube, self.momentsG) / weight) + b = round(self.volume(cube, self.momentsB) / weight) + color = (255 << 24) | ((r & 0x0ff) << 16) | ((g & 0x0ff) << 8) | (b & 0x0ff) + colors.append(color) + return colors + + def variance(self, cube): + dr = self.volume(cube, 
            self.momentsR)
        dg = self.volume(cube, self.momentsG)
        db = self.volume(cube, self.momentsB)
        # Moment of the squared channel values inside the box, read from the
        # precomputed 3D cumulative table via inclusion-exclusion over the
        # eight corners of the box.
        xx = self.moments[self.getIndex(cube.r1, cube.g1, cube.b1)] - self.moments[self.getIndex(cube.r1, cube.g1, cube.b0)] - self.moments[self.getIndex(cube.r1, cube.g0, cube.b1)] + self.moments[self.getIndex(cube.r1, cube.g0, cube.b0)] - self.moments[self.getIndex(cube.r0, cube.g1, cube.b1)] + self.moments[self.getIndex(cube.r0, cube.g1, cube.b0)] + self.moments[self.getIndex(cube.r0, cube.g0, cube.b1)] - self.moments[self.getIndex(cube.r0, cube.g0, cube.b0)]
        hypotenuse = dr * dr + dg * dg + db * db
        volume = self.volume(cube, self.weights)
        # Weighted variance of the box: sum(c^2) - (sum(c))^2 / count.
        return xx - hypotenuse / volume

    # /**
    # * Splits box `one` along the axis (R, G or B) whose cut maximizes the
    # * resulting variance, writing the upper half into `two`.  Both boxes
    # * are mutated in place.
    # *
    # * @param one box to split; shrinks to the lower half of the cut.
    # * @param two receives the upper half of the cut.
    # * @return True if a cut was made, False when the box cannot be split.
    # */
    def cut(self, one, two):
        wholeR = self.volume(one, self.momentsR)
        wholeG = self.volume(one, self.momentsG)
        wholeB = self.volume(one, self.momentsB)
        wholeW = self.volume(one, self.weights)
        maxRResult = self.maximize(one, directions["RED"], one.r0 + 1, one.r1, wholeR, wholeG, wholeB, wholeW)
        maxGResult = self.maximize(one, directions["GREEN"], one.g0 + 1, one.g1, wholeR, wholeG, wholeB, wholeW)
        maxBResult = self.maximize(one, directions["BLUE"], one.b0 + 1, one.b1, wholeR, wholeG, wholeB, wholeW)
        direction = None
        maxR = maxRResult.maximum
        maxG = maxGResult.maximum
        maxB = maxBResult.maximum
        # NOTE(review): only the RED branch guards against cutLocation < 0;
        # this asymmetry matches the upstream material-color-utilities code.
        if (maxR >= maxG and maxR >= maxB):
            if (maxRResult.cutLocation < 0):
                return False
            direction = directions["RED"]
        elif (maxG >= maxR and maxG >= maxB):
            direction = directions["GREEN"]
        else:
            direction = directions["BLUE"]
        # `two` inherits the upper corner of `one`; its lower corner is set
        # below, on the cut plane of the chosen axis.
        two.r1 = one.r1
        two.g1 = one.g1
        two.b1 = one.b1

        if (direction == directions["RED"]):
            one.r1 = maxRResult.cutLocation
            two.r0 = one.r1
            two.g0 = one.g0
            two.b0 = one.b0
        elif (direction == directions["GREEN"]):
            one.g1 = maxGResult.cutLocation
            two.r0 = one.r0
            two.g0 = one.g1
            two.b0 = one.b0
        elif (direction == directions["BLUE"]):
            one.b1 = maxBResult.cutLocation
            two.r0 = one.r0
            two.g0 = one.g0
            two.b0 = one.b1
        else:
            raise Exception('unexpected direction ' + direction)

        one.vol = (one.r1 - one.r0) * (one.g1 - one.g0) * (one.b1 - one.b0)
        two.vol = (two.r1 - two.r0) * (two.g1 - two.g0) * (two.b1 - two.b0)
        return True

    # /**
    # * Scans cut positions [first, last) along `direction` and returns the
    # * position that maximizes the sum of variances of the two halves,
    # * together with that maximum.  cutLocation is -1 when no position with
    # * non-zero weight on both sides exists.
    # */
    def maximize(self, cube, direction, first, last, wholeR, wholeG, wholeB, wholeW):
        bottomR = self.bottom(cube, direction, self.momentsR)
        bottomG = self.bottom(cube, direction, self.momentsG)
        bottomB = self.bottom(cube, direction, self.momentsB)
        bottomW = self.bottom(cube, direction, self.weights)
        # `max` / `cut` shadow builtins; kept as in the original.
        max = 0.0
        cut = -1
        halfR = 0
        halfG = 0
        halfB = 0
        halfW = 0
        for i in range(first, last):
            # Lower half: base contribution plus everything up to plane i.
            halfR = bottomR + self.top(cube, direction, i, self.momentsR)
            halfG = bottomG + self.top(cube, direction, i, self.momentsG)
            halfB = bottomB + self.top(cube, direction, i, self.momentsB)
            halfW = bottomW + self.top(cube, direction, i, self.weights)
            if (halfW == 0):
                continue
            tempNumerator = (halfR * halfR + halfG * halfG + halfB * halfB) * 1.0
            tempDenominator = halfW * 1.0
            temp = tempNumerator / tempDenominator
            # Upper half: remainder of the whole box.
            halfR = wholeR - halfR
            halfG = wholeG - halfG
            halfB = wholeB - halfB
            halfW = wholeW - halfW
            if (halfW == 0):
                continue
            tempNumerator = (halfR * halfR + halfG * halfG + halfB * halfB) * 1.0
            tempDenominator = halfW * 1.0
            temp += tempNumerator / tempDenominator
            if (temp > max):
                max = temp
                cut = i
        return MaximizeResult(cut, max)

    # /**
    # * Sum of `moment` over the box, via inclusion-exclusion on the eight
    # * corners of the cumulative-moment table.
    # */
    def volume(self, cube, moment):
        return (moment[self.getIndex(cube.r1, cube.g1, cube.b1)] - moment[self.getIndex(cube.r1, cube.g1, cube.b0)] - moment[self.getIndex(cube.r1, cube.g0, cube.b1)] + moment[self.getIndex(cube.r1, cube.g0, cube.b0)] - moment[self.getIndex(cube.r0, cube.g1, cube.b1)] + moment[self.getIndex(cube.r0, cube.g1, cube.b0)] + moment[self.getIndex(cube.r0, cube.g0, cube.b1)] - moment[self.getIndex(cube.r0, cube.g0, cube.b0)])

    # /**
    # * Contribution of the face at the box's LOWER bound along `direction`
    # * (negated, so it can be added to a top() slice to form a half-box sum).
    # */
    def bottom(self, cube, direction, moment):
        if (direction == directions["RED"]):
            return (-moment[self.getIndex(cube.r0, cube.g1, cube.b1)] + moment[self.getIndex(cube.r0, cube.g1, cube.b0)] + moment[self.getIndex(cube.r0, cube.g0, cube.b1)] - moment[self.getIndex(cube.r0, cube.g0, cube.b0)])
        elif (direction == directions["GREEN"]):
            return (-moment[self.getIndex(cube.r1, cube.g0, cube.b1)] + moment[self.getIndex(cube.r1, cube.g0, cube.b0)] + moment[self.getIndex(cube.r0, cube.g0, cube.b1)] - moment[self.getIndex(cube.r0, cube.g0, cube.b0)])
        elif (direction == directions["BLUE"]):
            return (-moment[self.getIndex(cube.r1, cube.g1, cube.b0)] + moment[self.getIndex(cube.r1, cube.g0, cube.b0)] + moment[self.getIndex(cube.r0, cube.g1, cube.b0)] - moment[self.getIndex(cube.r0, cube.g0, cube.b0)])
        else:
            raise Exception('unexpected direction ' + direction)

    # /**
    # * Contribution of the face at `position` along `direction` (the moving
    # * cut plane scanned by maximize()).
    # */
    def top(self, cube, direction, position, moment):
        if (direction == directions["RED"]):
            return (moment[self.getIndex(position, cube.g1, cube.b1)] - moment[self.getIndex(position, cube.g1, cube.b0)] - moment[self.getIndex(position, cube.g0, cube.b1)] + moment[self.getIndex(position, cube.g0, cube.b0)])
        elif (direction == directions["GREEN"]):
            return (moment[self.getIndex(cube.r1, position, cube.b1)] - moment[self.getIndex(cube.r1, position, cube.b0)] - moment[self.getIndex(cube.r0, position, cube.b1)] + moment[self.getIndex(cube.r0, position, cube.b0)])
        elif (direction == directions["BLUE"]):
            return (moment[self.getIndex(cube.r1, cube.g1, position)] - moment[self.getIndex(cube.r1, cube.g0, position)] - moment[self.getIndex(cube.r0, cube.g1, position)] + moment[self.getIndex(cube.r0, cube.g0, position)])
        else:
            raise Exception('unexpected direction ' + direction)

    # /**
    # * Flattens (r, g, b) histogram coordinates into an index of the 1D
    # * moment arrays; algebraically r*(2^(2*INDEX_BITS) + 2^(INDEX_BITS+1)
    # * + 1) + g*(2^INDEX_BITS + 1) + b, assuming INDEX_BITS is defined
    # * earlier in this file (not visible here — confirm).
    # */
    def getIndex(self, r, g, b):
        return (r << (INDEX_BITS * 2)) + (r << (INDEX_BITS + 1)) + r + (g << INDEX_BITS) + g + b

# /**
# * Keeps track of the state of each box created as the Wu quantization
# * algorithm progresses through dividing the image's pixels as plotted in RGB.
# */
class Box:
    # A box is a region of the RGB histogram bounded per channel by
    # [r0, r1], [g0, g1], [b0, b1]; `vol` caches its size in histogram cells.
    def __init__(self, r0 = 0, r1 = 0, g0 = 0, g1 = 0, b0 = 0, b1 = 0, vol = 0):
        self.r0, self.r1 = r0, r1
        self.g0, self.g1 = g0, g1
        self.b0, self.b1 = b0, b1
        self.vol = vol

# /**
# * Represents final result of Wu algorithm.
# */
class CreateBoxesResult:
    # /**
    # * @param requestedCount how many colors the caller asked to be returned from
    # * quantization.
    # * @param resultCount the actual number of colors achieved from quantization.
    # * May be lower than the requested count.
    # */
    def __init__(self, requestedCount, resultCount):
        self.requestedCount, self.resultCount = requestedCount, resultCount

# /**
# * Represents the result of calculating where to cut an existing box in such
# * a way to maximize variance between the two new boxes created by a cut.
# */
class MaximizeResult:
    # cutLocation is -1 when no valid cut exists; maximum is the variance
    # achieved at that cut.
    def __init__(self, cutLocation, maximum):
        self.cutLocation, self.maximum = cutLocation, maximum

# /**
# * @license
# * Copyright 2021 Google LLC
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

from palettes.core_palette import *
import json

# /**
# * Represents a Material color scheme, a mapping of color roles to colors.
# */
# Using dictionary instead of JavaScript Object
# /**
# * Represents a Material color scheme: a dict mapping color role names
# * (e.g. "primary", "onSurface") to ARGB ints.  Every role is reachable
# * both as a read-only property (scheme.primary) and through a
# * get_<role>() accessor; both read straight from the underlying dict.
# */
class Scheme:
    def __init__(self, props):
        self.props = props

    # /**
    # * @param argb ARGB representation of a color.
    # * @return Light Material color scheme, based on the color's hue.
    # */
    @staticmethod
    def light(argb):
        core = CorePalette.of(argb)
        return Scheme({
            "primary" : core.a1.tone(40),
            "onPrimary" : core.a1.tone(100),
            "primaryContainer" : core.a1.tone(90),
            "onPrimaryContainer" : core.a1.tone(10),
            "secondary" : core.a2.tone(40),
            "onSecondary" : core.a2.tone(100),
            "secondaryContainer" : core.a2.tone(90),
            "onSecondaryContainer" : core.a2.tone(10),
            "tertiary" : core.a3.tone(40),
            "onTertiary" : core.a3.tone(100),
            "tertiaryContainer" : core.a3.tone(90),
            "onTertiaryContainer" : core.a3.tone(10),
            "error" : core.error.tone(40),
            "onError" : core.error.tone(100),
            "errorContainer" : core.error.tone(90),
            "onErrorContainer" : core.error.tone(10),
            "background" : core.n1.tone(99),
            "onBackground" : core.n1.tone(10),
            "surface" : core.n1.tone(99),
            "onSurface" : core.n1.tone(10),
            "surfaceVariant" : core.n2.tone(90),
            "onSurfaceVariant" : core.n2.tone(30),
            "outline" : core.n2.tone(50),
            "shadow" : core.n1.tone(0),
            "inverseSurface" : core.n1.tone(20),
            "inverseOnSurface" : core.n1.tone(95),
            "inversePrimary" : core.a1.tone(80)
        })

    # /**
    # * @param argb ARGB representation of a color.
    # * @return Dark Material color scheme, based on the color's hue.
    # */
    @staticmethod
    def dark(argb):
        core = CorePalette.of(argb)
        return Scheme({
            "primary" : core.a1.tone(80),
            "onPrimary" : core.a1.tone(20),
            "primaryContainer" : core.a1.tone(30),
            "onPrimaryContainer" : core.a1.tone(90),
            "secondary" : core.a2.tone(80),
            "onSecondary" : core.a2.tone(20),
            "secondaryContainer" : core.a2.tone(30),
            "onSecondaryContainer" : core.a2.tone(90),
            "tertiary" : core.a3.tone(80),
            "onTertiary" : core.a3.tone(20),
            "tertiaryContainer" : core.a3.tone(30),
            "onTertiaryContainer" : core.a3.tone(90),
            "error" : core.error.tone(80),
            "onError" : core.error.tone(20),
            "errorContainer" : core.error.tone(30),
            "onErrorContainer" : core.error.tone(80),
            "background" : core.n1.tone(10),
            "onBackground" : core.n1.tone(90),
            "surface" : core.n1.tone(10),
            "onSurface" : core.n1.tone(90),
            "surfaceVariant" : core.n2.tone(30),
            "onSurfaceVariant" : core.n2.tone(80),
            "outline" : core.n2.tone(60),
            "shadow" : core.n1.tone(0),
            "inverseSurface" : core.n1.tone(90),
            "inverseOnSurface" : core.n1.tone(20),
            "inversePrimary" : core.a1.tone(40)
        })

    # /**
    # * @return JSON string of the underlying role -> ARGB mapping.
    # */
    def toJSON(self):
        return json.dumps(self.props)


# All color roles exposed by a Scheme, in declaration order.
_SCHEME_ROLES = (
    "primary", "primaryContainer", "onPrimary", "onPrimaryContainer",
    "secondary", "secondaryContainer", "onSecondary", "onSecondaryContainer",
    "tertiary", "onTertiary", "tertiaryContainer", "onTertiaryContainer",
    "error", "onError", "errorContainer", "onErrorContainer",
    "background", "onBackground", "surface", "onSurface",
    "surfaceVariant", "onSurfaceVariant", "outline", "shadow",
    "inverseSurface", "inverseOnSurface", "inversePrimary",
)


def _scheme_accessor(role):
    # Build a get_<role>(self) accessor bound to one dictionary key.
    def getter(self):
        return self.props[role]
    getter.__name__ = "get_" + role
    return getter


# Install get_<role>() methods and matching read-only properties on Scheme,
# replacing 27 hand-written getter/property pairs with identical behavior.
for _role in _SCHEME_ROLES:
    _getter = _scheme_accessor(_role)
    setattr(Scheme, _getter.__name__, _getter)
    setattr(Scheme, _role, property(_getter))

# /**
# * @license
# * Copyright 2021 Google LLC
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

from hct.cam16 import *
from utils.color_utils import *
from utils.math_utils import *
from collections import OrderedDict

# /**
# * Given a large set of colors, remove colors that are unsuitable for a UI
# * theme, and rank the rest based on suitability.
# *
# * Enables use of a high cluster count for image quantization, thus ensuring
# * colors aren't muddied, while curating the high cluster count to a much
# * smaller number of appropriate choices.
# */
class Score:
    def __init__(self):
        pass

    # /**
    # * Given a map with keys of colors and values of how often the color appears,
    # * rank the colors based on suitability for being used for a UI theme.
    # *
    # * @param colorsToPopulation map with keys of colors (ARGB ints) and values
    # * of how often the color appears, usually from a source image.
    # * @return Colors sorted by suitability for a UI theme. The most suitable
    # * color is the first item, the least suitable is the last. There will
    # * always be at least one color returned. If all the input colors
    # * were not suitable for a theme, a default fallback color will be
    # * provided, Google Blue.
    # */
    # Using OrderedDict for JavaScript Map
    @staticmethod
    def score(colorsToPopulation):
        # // Determine the total count of all colors.
        populationSum = 0
        for population in colorsToPopulation.values():
            populationSum += population
        # // Turn the count of each color into a proportion by dividing by the total
        # // count. Also, fill a cache of CAM16 colors representing each color, and
        # // record the proportion of colors for each CAM16 hue.
        colorsToProportion = OrderedDict()
        colorsToCam = OrderedDict()
        # 361 buckets so a rounded hue of exactly 360 stays in range —
        # presumably Cam16.hue can reach 360.0; confirm against Cam16.
        hueProportions = [0] * 361
        for (color, population) in colorsToPopulation.items():
            proportion = population / populationSum
            colorsToProportion[color] = proportion
            cam = Cam16.fromInt(color)
            colorsToCam[color] = cam
            hue = round(cam.hue)
            hueProportions[hue] += proportion
        # // Determine the proportion of the colors around each color, by summing the
        # // proportions around each color's hue.
        colorsToExcitedProportion = OrderedDict()
        for (color, cam) in colorsToCam.items():
            hue = round(cam.hue)
            excitedProportion = 0
            # +/-15 degree window; sanitizeDegreesInt wraps around 0/360.
            for i in range((hue - 15), (hue + 15)):
                neighborHue = sanitizeDegreesInt(i)
                excitedProportion += hueProportions[neighborHue]
            colorsToExcitedProportion[color] = excitedProportion
        # // Score the colors by their proportion, as well as how chromatic they are.
        colorsToScore = OrderedDict()
        for (color, cam) in colorsToCam.items():
            proportion = colorsToExcitedProportion[color]
            proportionScore = proportion * 100.0 * Score.WEIGHT_PROPORTION
            # Below-target chroma is penalized more gently than above-target
            # chroma is rewarded (0.1 vs 0.3).
            chromaWeight = Score.WEIGHT_CHROMA_BELOW if cam.chroma < Score.TARGET_CHROMA else Score.WEIGHT_CHROMA_ABOVE
            chromaScore = (cam.chroma - Score.TARGET_CHROMA) * chromaWeight
            score = proportionScore + chromaScore
            colorsToScore[color] = score
        # // Remove colors that are unsuitable, ex. very dark or unchromatic colors.
        # // Also, remove colors that are very similar in hue.
        filteredColors = Score.filter(colorsToExcitedProportion, colorsToCam)
        dedupedColorsToScore = OrderedDict()
        for color in filteredColors:
            duplicateHue = False
            hue = colorsToCam[color].hue
            # Keep only the first color seen within 15 degrees of hue;
            # insertion order (source map order) decides the winner.
            for alreadyChosenColor in dedupedColorsToScore:
                alreadyChosenHue = colorsToCam[alreadyChosenColor].hue
                if (differenceDegrees(hue, alreadyChosenHue) < 15):
                    duplicateHue = True
                    break
            if (duplicateHue):
                continue
            dedupedColorsToScore[color] = colorsToScore[color]
        # // Ensure the list of colors returned is sorted such that the first in the
        # // list is the most suitable, and the last is the least suitable.
        colorsByScoreDescending = list(dedupedColorsToScore.items())
        colorsByScoreDescending.sort(reverse = True, key = lambda x: x[1])
        answer = list(map(lambda x: x[0], colorsByScoreDescending))
        # // Ensure that at least one color is returned.
        if (len(answer) == 0):
            answer.append(0xff4285F4) # // Google Blue
        return answer

    # /**
    # * Drops colors that are too unchromatic, too dark, or too rare to anchor
    # * a theme; preserves the iteration order of colorsToCam.
    # */
    @staticmethod
    def filter(colorsToExcitedProportion, colorsToCam):
        filtered = []
        for (color, cam) in colorsToCam.items():
            proportion = colorsToExcitedProportion[color]
            if (cam.chroma >= Score.CUTOFF_CHROMA and
                lstarFromArgb(color) >= Score.CUTOFF_TONE and
                proportion >= Score.CUTOFF_EXCITED_PROPORTION):
                filtered.append(color)
        return filtered

# Tuning constants for score()/filter(), mirroring the upstream library.
Score.TARGET_CHROMA = 48.0
Score.WEIGHT_PROPORTION = 0.7
Score.WEIGHT_CHROMA_ABOVE = 0.3
Score.WEIGHT_CHROMA_BELOW = 0.1
Score.CUTOFF_CHROMA = 15.0
Score.CUTOFF_TONE = 10.0
Score.CUTOFF_EXCITED_PROPORTION = 0.01

# /**
# * @license
# * Copyright 2021 Google LLC
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

from utils.math_utils import *
import math

# /**
# * Color science utilities.
# *
# * Utility methods for color science constants and color space
# * conversions that aren't HCT or CAM16.
# */
SRGB_TO_XYZ = [
    [0.41233895, 0.35762064, 0.18051042],
    [0.2126, 0.7152, 0.0722],
    [0.01932141, 0.11916382, 0.95034478],
]
XYZ_TO_SRGB = [
    [3.2413774792388685, -1.5376652402851851, -0.49885366846268053],
    [-0.9691452513005321, 1.8758853451067872, 0.04156585616912061],
    [0.05562093689691305, -0.20395524564742123, 1.0571799111220335],
]

WHITE_POINT_D65 = [95.047, 100.0, 108.883]


def rshift(val, n):
    # Emulates JavaScript's unsigned right shift (>>>): negative values are
    # folded into unsigned 32-bit space before shifting.
    if val < 0:
        val += 0x100000000
    return val >> n


# /**
# * Converts a color from RGB components to ARGB format (alpha forced to 255).
# */
def argbFromRgb(red, green, blue):
    packed = 255 << 24 | (red & 255) << 16 | (green & 255) << 8 | blue & 255
    return rshift(packed, 0)


# /**
# * Returns the alpha component of a color in ARGB format.
# */
def alphaFromArgb(argb):
    return (argb >> 24) & 255


# /**
# * Returns the red component of a color in ARGB format.
# */
def redFromArgb(argb):
    return (argb >> 16) & 255


# /**
# * Returns the green component of a color in ARGB format.
# */
def greenFromArgb(argb):
    return (argb >> 8) & 255


# /**
# * Returns the blue component of a color in ARGB format.
# */
def blueFromArgb(argb):
    return argb & 255


# /**
# * Returns whether a color in ARGB format is opaque.
# */
def isOpaque(argb):
    # Fully opaque means an alpha channel of 255.
    return alphaFromArgb(argb) >= 255


# /**
# * Converts a color from XYZ coordinates to an ARGB integer (sRGB, D65).
# */
def argbFromXyz(x, y, z):
    linear = [row[0] * x + row[1] * y + row[2] * z for row in XYZ_TO_SRGB]
    return argbFromRgb(delinearized(linear[0]), delinearized(linear[1]), delinearized(linear[2]))


# /**
# * Converts an ARGB integer to XYZ coordinates ([X, Y, Z], D65, 0-100 scale).
# */
def xyzFromArgb(argb):
    channels = [linearized(redFromArgb(argb)), linearized(greenFromArgb(argb)), linearized(blueFromArgb(argb))]
    return matrixMultiply(channels, SRGB_TO_XYZ)


# /**
# * Inverse of the CIE Lab f() transfer function: cubes the input above the
# * epsilon threshold, otherwise applies the linear segment.
# */
def labInvf(ft):
    e = 216.0 / 24389.0
    kappa = 24389.0 / 27.0
    ft3 = ft * ft * ft
    if ft3 > e:
        return ft3
    return (116 * ft - 16) / kappa


# /**
# * Converts a color represented in Lab color space into an ARGB
# * integer.
# */
def argbFromLab(l, a, b):
    # Recover f(x), f(y), f(z) from L*, a*, b*, invert the transfer
    # function, then scale by the D65 white point.
    fy = (l + 16.0) / 116.0
    fx = a / 500.0 + fy
    fz = fy - b / 200.0
    scaled = [labInvf(fx), labInvf(fy), labInvf(fz)]
    return argbFromXyz(scaled[0] * WHITE_POINT_D65[0], scaled[1] * WHITE_POINT_D65[1], scaled[2] * WHITE_POINT_D65[2])

# /**
# * Converts a color from ARGB representation to L*a*b*
# * representation.
# *
# * @param argb the ARGB representation of a color
# * @return a Lab object representing the color
# */
def labF(t):
    # CIE Lab f() transfer function: cube root above the epsilon cutoff,
    # linear segment below it.
    e = 216.0 / 24389.0
    kappa = 24389.0 / 27.0
    return math.pow(t, 1.0 / 3.0) if t > e else (kappa * t + 16) / 116


def labFromArgb(argb):
    # Linearize the sRGB channels, project into XYZ, normalize by the D65
    # white point, then apply the Lab transfer function per axis.
    channels = [linearized(redFromArgb(argb)), linearized(greenFromArgb(argb)), linearized(blueFromArgb(argb))]
    fx, fy, fz = [
        labF((row[0] * channels[0] + row[1] * channels[1] + row[2] * channels[2]) / whitePoint)
        for row, whitePoint in zip(SRGB_TO_XYZ, WHITE_POINT_D65)
    ]
    return [116.0 * fy - 16, 500.0 * (fx - fy), 200.0 * (fy - fz)]


# /**
# * Converts an L* value to an ARGB representation.
# *
# * @param lstar L* in L*a*b*
# * @return ARGB representation of grayscale color with lightness
# * matching L*
# */
def argbFromLstar(lstar):
    # Grayscale: fx == fy == fz, so X and Z share Y's cube (or linear) form.
    fy = (lstar + 16.0) / 116.0
    kappa = 24389.0 / 27.0
    epsilon = 216.0 / 24389.0
    cube = fy * fy * fy
    y = cube if lstar > 8.0 else lstar / kappa
    xz = cube if cube > epsilon else lstar / kappa
    return argbFromXyz(xz * WHITE_POINT_D65[0], y * WHITE_POINT_D65[1], xz * WHITE_POINT_D65[2])

# /**
# * Computes the L* value of a color in ARGB representation.
# *
# * @param argb ARGB representation of a color
# * @return L*, from L*a*b*, coordinate of the color
# */
def lstarFromArgb(argb):
    # Relative luminance Y (scaled to 0..1) drives L* via the Lab transfer.
    y = xyzFromArgb(argb)[1] / 100.0
    e = 216.0 / 24389.0
    if y <= e:
        return 24389.0 / 27.0 * y
    return 116.0 * math.pow(y, 1.0 / 3.0) - 16.0


# /**
# * Converts an L* value to a Y value.
# *
# * L* in L*a*b* and Y in XYZ measure the same quantity, luminance.
# *
# * L* measures perceptual luminance, a linear scale. Y in XYZ
# * measures relative luminance, a logarithmic scale.
# *
# * @param lstar L* in L*a*b*
# * @return Y in XYZ
# */
def yFromLstar(lstar):
    # Cubic branch above L* = 8, linear branch below.
    if lstar > 8.0:
        return math.pow((lstar + 16.0) / 116.0, 3.0) * 100.0
    return lstar / (24389.0 / 27.0) * 100.0


# /**
# * Linearizes an RGB component.
# *
# * @param rgbComponent 0 <= rgb_component <= 255, represents R/G/B
# * channel
# * @return 0.0 <= output <= 100.0, color channel converted to
# * linear RGB space
# */
def linearized(rgbComponent):
    normalized = rgbComponent / 255.0
    if normalized <= 0.040449936:
        return normalized / 12.92 * 100.0
    return math.pow((normalized + 0.055) / 1.055, 2.4) * 100.0


# /**
# * Delinearizes an RGB component.
# *
# * @param rgbComponent 0.0 <= rgb_component <= 100.0, represents
# * linear R/G/B channel
# * @return 0 <= output <= 255, color channel converted to regular
# * RGB space
# */
def delinearized(rgbComponent):
    normalized = rgbComponent / 100.0
    if normalized <= 0.0031308:
        value = normalized * 12.92
    else:
        value = 1.055 * math.pow(normalized, 1.0 / 2.4) - 0.055
    return clampInt(0, 255, round(value * 255.0))

# /**
# * Returns the standard white point white on a sunny day.
# *
# * @return The white point
# */
def whitePointD65():
    # Accessor for the module-level D65 constant; returns the shared list
    # itself, so callers must not mutate the result.
    return WHITE_POINT_D65


# /**
# * @license
# * Copyright 2021 Google LLC
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

from quantize.quantizer_celebi import *
from score.score import *
from utils.color_utils import *
from PIL import Image

# /**
# * Get the source color from an image.
# *
# * @param image The image element
# * @return Source color - the color most suitable for creating a UI theme
# */
def sourceColorFromImage(image):
    # Ensure 4-channel pixels; plain RGB is promoted silently, any other
    # mode is converted with a warning.
    if image.mode != 'RGBA':
        if image.mode != 'RGB':
            print("Warning: Image not in RGB|RGBA format - Converting...")
        image = image.convert('RGBA')

    # Collect the fully-opaque pixels as ARGB ints, traversing columns
    # first to match the original implementation's pixel order.
    pixels = []
    for x in range(image.width):
        for y in range(image.height):
            r, g, b, a = image.getpixel((x, y))
            if a < 255:
                continue
            pixels.append(argbFromRgb(r, g, b))

    # // Convert Pixels to Material Colors: quantize to a 128-color palette,
    # // then rank the palette for UI-theme suitability.
    result = QuantizerCelebi.quantize(pixels, 128)
    ranked = Score.score(result)
    return ranked[0]

# /**
# * @license
# * Copyright 2021 Google LLC
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may
# * not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

#
# Utility methods for mathematical operations.
#

# /**
# * The signum function.
# *
# * @return 1 if num > 0, -1 if num < 0, and 0 if num = 0
# */
def signum(num):
    if num > 0:
        return 1
    if num < 0:
        return -1
    return 0

# /**
# * The linear interpolation function.
# *
# * @return start if amount = 0 and stop if amount = 1
# */
def lerp(start, stop, amount):
    # Two-product form keeps both endpoints exact in floating point.
    return (1.0 - amount) * start + amount * stop

# /**
# * Clamps an integer between two integers.
# *
# * @return input when min <= input <= max, and either min or max
# * otherwise.
# */
def clampInt(min, max, input):
    # NOTE: parameter names shadow builtins; kept for keyword-argument
    # compatibility with existing callers.
    if input < min:
        return min
    if input > max:
        return max
    return input

# /**
# * Clamps a value between two floating-point numbers.
# *
# * @return input when min <= input <= max, and either min or max
# * otherwise.
# */
def clampDouble(min, max, input):
    if input < min:
        return min
    if input > max:
        return max
    return input

# /**
# * Sanitizes a degree measure as an integer.
# *
# * @return a degree measure between 0 (inclusive) and 360
# * (exclusive).
# */
def sanitizeDegreesInt(degrees):
    # Python's % is already non-negative for a positive modulus; the guard
    # mirrors the JS original, where % keeps the sign.
    degrees = degrees % 360
    if degrees < 0:
        degrees += 360
    return degrees

# /**
# * Sanitizes a degree measure as a floating-point number.
# *
# * @return a degree measure between 0.0 (inclusive) and 360.0
# * (exclusive).
# */
def sanitizeDegreesDouble(degrees):
    # Wrap into [0.0, 360.0); the negative guard mirrors the JS original,
    # where % keeps the operand's sign.
    degrees = degrees % 360.0
    if degrees < 0:
        degrees += 360.0
    return degrees

# /**
# * Distance of two points on a circle, represented using degrees.
# */
def differenceDegrees(a, b):
    # Shortest angular distance, always in [0, 180].
    return 180.0 - abs(abs(a - b) - 180.0)

# /**
# * Multiplies a 1x3 row vector with a 3x3 matrix.
# */
def matrixMultiply(row, matrix):
    return [
        row[0] * matrix[i][0] + row[1] * matrix[i][1] + row[2] * matrix[i][2]
        for i in range(3)
    ]

# /**
# * @license
# * Copyright 2021 Google LLC
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

from utils.color_utils import *

# /**
# * Utility methods for hexadecimal representations of colors.
# */
# /**
# * @param argb ARGB representation of a color.
# * @return Hex string representing color, ex. #ff0000 for red.
# */
def hexFromArgb(argb):
    # Two lowercase hex digits per channel; the alpha byte is dropped.
    channels = (redFromArgb(argb), greenFromArgb(argb), blueFromArgb(argb))
    return '#' + ''.join(f'{c:02x}' for c in channels)

# /**
# * Parses a string of hex digits (no prefix) into an int.
# */
def parseIntHex(value):
    # tslint:disable-next-line:ban
    return int(value, 16)

# /**
# * @param hex String representing color as hex code. Accepts strings with or
# * without leading #, and string representing the color using 3, 6, or 8
# * hex characters.
# * @return ARGB representation of color.
# */
def argbFromHex(hex):
    hex = hex.replace('#', '')
    if len(hex) not in (3, 6, 8):
        raise Exception('unexpected hex ' + hex)
    if len(hex) == 3:
        # Shorthand: each digit is doubled ("f80" -> "ff8800").
        r, g, b = (parseIntHex(digit * 2) for digit in hex)
    else:
        # 8-character strings carry a leading alpha byte, which is ignored.
        offset = len(hex) - 6
        r = parseIntHex(hex[offset:offset + 2])
        g = parseIntHex(hex[offset + 2:offset + 4])
        b = parseIntHex(hex[offset + 4:offset + 6])
    return rshift(((255 << 24) | ((r & 0x0ff) << 16) | ((g & 0x0ff) << 8) | (b & 0x0ff)), 0)

# /**
# * @license
# * Copyright 2021 Google LLC
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

from blend.blend import *
from palettes.core_palette import *
from scheme.scheme import *
from utils.image_utils import *
from utils.string_utils import *

# /**
# * Generate custom color group from source and target color
# *
# * @param source Source color (ARGB int)
# * @param color Custom color dict with keys "value" (ARGB int) and
# *     optionally "blend" (bool); a missing "blend" means no blending.
# * @return Custom color group
# *
# * @link https://m3.material.io/styles/color/the-color-system/color-roles
# */
# NOTE: Changes made to output format to be Dictionary
def customColor(source, color):
    value = color["value"]
    # Optionally shift the custom color's hue towards the theme source.
    # .get() treats a missing "blend" key as False, matching the upstream
    # JS where an undefined property is falsy (a plain ["blend"] lookup
    # raised KeyError).
    if color.get("blend"):
        value = Blend.harmonize(value, source)
    palette = CorePalette.of(value)
    tones = palette.a1
    return {
        "color": color,
        "value": value,
        "light": {
            "color": tones.tone(40),
            "onColor": tones.tone(100),
            "colorContainer": tones.tone(90),
            "onColorContainer": tones.tone(10),
        },
        "dark": {
            "color": tones.tone(80),
            "onColor": tones.tone(20),
            "colorContainer": tones.tone(30),
            "onColorContainer": tones.tone(90),
        },
    }

# /**
# * Generate a theme from a source color
# *
# * @param source Source color (ARGB int)
# * @param customColors Array of custom colors (see customColor); defaults
# *     to no custom colors.
# * @return Theme object (dict)
# */
# NOTE: Changes made to output format to be Dictionary
def themeFromSourceColor(source, customColors = None):
    # None sentinel instead of a mutable [] default (shared-state pitfall).
    if customColors is None:
        customColors = []
    palette = CorePalette.of(source)
    return {
        "source": source,
        "schemes": {
            "light": Scheme.light(source),
            "dark": Scheme.dark(source),
        },
        "palettes": {
            "primary": palette.a1,
            "secondary": palette.a2,
            "tertiary": palette.a3,
            "neutral": palette.n1,
            "neutralVariant": palette.n2,
            "error": palette.error,
        },
        "customColors": [customColor(source, c) for c in customColors]
    }

# /**
# * Generate a theme from an image source
# *
# * @param image PIL Image
# * @param customColors Array of custom colors; defaults to none.
# * @return Theme object (dict)
# */
def themeFromImage(image, customColors = None):
    source = sourceColorFromImage(image)
    return themeFromSourceColor(source, customColors)


# Not applicable to a python CLI: the upstream JS `applyTheme(theme, options)`
# helper writes `--md-sys-color-*` CSS custom properties onto a DOM element
# and has no equivalent here.