up follow livre
This commit is contained in:
parent
70a5c3465c
commit
cffb31c1ef
12198 changed files with 2562132 additions and 35 deletions
|
|
@ -0,0 +1,4 @@
|
|||
"""fontTools.feaLib -- a package for dealing with OpenType feature files."""
|
||||
|
||||
# The structure of OpenType feature files is defined here:
|
||||
# http://www.adobe.com/devnet/opentype/afdko/topic_feature_file_syntax.html
|
||||
|
|
@ -0,0 +1,78 @@
|
|||
from fontTools.ttLib import TTFont
|
||||
from fontTools.feaLib.builder import addOpenTypeFeatures, Builder
|
||||
from fontTools.feaLib.error import FeatureLibError
|
||||
from fontTools import configLogger
|
||||
from fontTools.misc.cliTools import makeOutputFileName
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
|
||||
log = logging.getLogger("fontTools.feaLib")
|
||||
|
||||
|
||||
def main(args=None):
    """Add features from a feature file (.fea) into an OTF font."""
    argparser = argparse.ArgumentParser(
        description="Use fontTools to compile OpenType feature files (*.fea)."
    )
    argparser.add_argument(
        "input_fea", metavar="FEATURES", help="Path to the feature file"
    )
    argparser.add_argument(
        "input_font", metavar="INPUT_FONT", help="Path to the input font"
    )
    argparser.add_argument(
        "-o",
        "--output",
        dest="output_font",
        metavar="OUTPUT_FONT",
        help="Path to the output font.",
    )
    argparser.add_argument(
        "-t",
        "--tables",
        metavar="TABLE_TAG",
        choices=Builder.supportedTables,
        nargs="+",
        help="Specify the table(s) to be built.",
    )
    argparser.add_argument(
        "-d",
        "--debug",
        action="store_true",
        help="Add source-level debugging information to font.",
    )
    argparser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="Increase the logger verbosity. Multiple -v options are allowed.",
    )
    argparser.add_argument(
        "--traceback", action="store_true", help="show traceback for exceptions."
    )
    opts = argparser.parse_args(args)

    # 0 -> WARNING, 1 -> INFO, 2 or more -> DEBUG.
    log_levels = ("WARNING", "INFO", "DEBUG")
    configLogger(level=log_levels[min(len(log_levels) - 1, opts.verbose)])

    # Derive an output path when none was given explicitly.
    out_path = opts.output_font or makeOutputFileName(opts.input_font)
    log.info("Compiling features to '%s'", out_path)

    font = TTFont(opts.input_font)
    try:
        addOpenTypeFeatures(
            font, opts.input_fea, tables=opts.tables, debug=opts.debug
        )
    except FeatureLibError as err:
        if opts.traceback:
            raise
        log.error(err)
        sys.exit(1)
    font.save(out_path)
||||
# Script entry point: main() returns None on success, so sys.exit(None)
# exits with status 0; errors inside main() call sys.exit(1) themselves.
if __name__ == "__main__":
    sys.exit(main())
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
2143
venv/lib/python3.13/site-packages/fontTools/feaLib/ast.py
Normal file
2143
venv/lib/python3.13/site-packages/fontTools/feaLib/ast.py
Normal file
File diff suppressed because it is too large
Load diff
1808
venv/lib/python3.13/site-packages/fontTools/feaLib/builder.py
Normal file
1808
venv/lib/python3.13/site-packages/fontTools/feaLib/builder.py
Normal file
File diff suppressed because it is too large
Load diff
22
venv/lib/python3.13/site-packages/fontTools/feaLib/error.py
Normal file
22
venv/lib/python3.13/site-packages/fontTools/feaLib/error.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
class FeatureLibError(Exception):
    """Error raised while processing a feature file.

    Carries an optional ``location`` (truthy object, e.g. a
    FeatureLibLocation) that is prefixed to the message when present.
    """

    def __init__(self, message, location=None):
        super().__init__(message)
        self.location = location

    def __str__(self):
        text = Exception.__str__(self)
        # Prefix the source location when one was recorded.
        return f"{self.location}: {text}" if self.location else text
||||
class IncludedFeaNotFound(FeatureLibError):
    """Raised when an ``include`` statement points at a missing file."""

    def __str__(self):
        # Include statements always carry a location, so this must be set.
        assert self.location is not None

        missing = Exception.__str__(self)
        message = (
            "The following feature file should be included but cannot be found: "
            + missing
        )
        return f"{self.location}: {message}"
17351
venv/lib/python3.13/site-packages/fontTools/feaLib/lexer.c
Normal file
17351
venv/lib/python3.13/site-packages/fontTools/feaLib/lexer.c
Normal file
File diff suppressed because it is too large
Load diff
Binary file not shown.
287
venv/lib/python3.13/site-packages/fontTools/feaLib/lexer.py
Normal file
287
venv/lib/python3.13/site-packages/fontTools/feaLib/lexer.py
Normal file
|
|
@ -0,0 +1,287 @@
|
|||
from fontTools.feaLib.error import FeatureLibError, IncludedFeaNotFound
|
||||
from fontTools.feaLib.location import FeatureLibLocation
|
||||
import re
|
||||
import os
|
||||
|
||||
try:
|
||||
import cython
|
||||
except ImportError:
|
||||
# if cython not installed, use mock module with no-op decorators and types
|
||||
from fontTools.misc import cython
|
||||
|
||||
|
||||
class Lexer(object):
    """Tokenizer for OpenType feature (.fea) file text.

    Iterating over a Lexer yields ``(token_type, token, location)``
    triples, silently skipping NEWLINE tokens; ``location`` is a
    FeatureLibLocation for the start of the token.
    """

    # Token type tags.
    NUMBER = "NUMBER"
    HEXADECIMAL = "HEXADECIMAL"
    OCTAL = "OCTAL"
    NUMBERS = (NUMBER, HEXADECIMAL, OCTAL)
    FLOAT = "FLOAT"
    STRING = "STRING"
    NAME = "NAME"
    FILENAME = "FILENAME"
    GLYPHCLASS = "GLYPHCLASS"
    CID = "CID"
    SYMBOL = "SYMBOL"
    COMMENT = "COMMENT"
    NEWLINE = "NEWLINE"
    ANONYMOUS_BLOCK = "ANONYMOUS_BLOCK"

    # Character classes used by the scanner.
    CHAR_WHITESPACE_ = " \t"
    CHAR_NEWLINE_ = "\r\n"
    CHAR_SYMBOL_ = ",;:-+'{}[]<>()="
    CHAR_DIGIT_ = "0123456789"
    CHAR_HEXDIGIT_ = "0123456789ABCDEFabcdef"
    CHAR_LETTER_ = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    CHAR_NAME_START_ = CHAR_LETTER_ + "_+*:.^~!\\"
    CHAR_NAME_CONTINUATION_ = CHAR_LETTER_ + CHAR_DIGIT_ + "_.+*:^~!/-"

    # Valid glyph class names, checked after the leading "@" is stripped.
    RE_GLYPHCLASS = re.compile(r"^[A-Za-z_0-9.\-]+$")

    MODE_NORMAL_ = "NORMAL"
    MODE_FILENAME_ = "FILENAME"  # expecting "(path)" right after "include"

    def __init__(self, text, filename):
        # filename is only used for error locations; may be None.
        self.filename_ = filename
        self.line_ = 1  # current line number, 1-based
        self.pos_ = 0  # current scan offset into text_
        self.line_start_ = 0  # offset of the start of the current line
        self.text_ = text
        self.text_length_ = len(text)
        self.mode_ = Lexer.MODE_NORMAL_

    def __iter__(self):
        return self

    def next(self):  # Python 2
        return self.__next__()

    def __next__(self):  # Python 3
        # Pull tokens from next_() and drop NEWLINE tokens, so callers
        # only ever see significant tokens.
        while True:
            token_type, token, location = self.next_()
            if token_type != Lexer.NEWLINE:
                return (token_type, token, location)

    def location_(self):
        """Return a FeatureLibLocation for the current scan position."""
        column = self.pos_ - self.line_start_ + 1
        return FeatureLibLocation(self.filename_ or "<features>", self.line_, column)

    def next_(self):
        """Scan and return the next token, including NEWLINE tokens.

        Raises StopIteration at end of text and FeatureLibError on
        malformed input.
        """
        self.scan_over_(Lexer.CHAR_WHITESPACE_)
        location = self.location_()
        start = self.pos_
        text = self.text_
        limit = len(text)
        if start >= limit:
            raise StopIteration()
        cur_char = text[start]
        next_char = text[start + 1] if start + 1 < limit else None

        if cur_char == "\n":
            self.pos_ += 1
            self.line_ += 1
            self.line_start_ = self.pos_
            return (Lexer.NEWLINE, None, location)
        if cur_char == "\r":
            # Treat "\r\n" as a single newline.
            self.pos_ += 2 if next_char == "\n" else 1
            self.line_ += 1
            self.line_start_ = self.pos_
            return (Lexer.NEWLINE, None, location)
        if cur_char == "#":
            # Comments run to the end of the line; the "#" is included.
            self.scan_until_(Lexer.CHAR_NEWLINE_)
            return (Lexer.COMMENT, text[start : self.pos_], location)

        if self.mode_ is Lexer.MODE_FILENAME_:
            # After an "include" keyword the next token must be "(path)";
            # the parentheses are stripped from the returned token.
            if cur_char != "(":
                raise FeatureLibError("Expected '(' before file name", location)
            self.scan_until_(")")
            cur_char = text[self.pos_] if self.pos_ < limit else None
            if cur_char != ")":
                raise FeatureLibError("Expected ')' after file name", location)
            self.pos_ += 1
            self.mode_ = Lexer.MODE_NORMAL_
            return (Lexer.FILENAME, text[start + 1 : self.pos_ - 1], location)

        if cur_char == "\\" and next_char in Lexer.CHAR_DIGIT_:
            # "\123" is a CID: the token value is the parsed integer.
            self.pos_ += 1
            self.scan_over_(Lexer.CHAR_DIGIT_)
            return (Lexer.CID, int(text[start + 1 : self.pos_], 10), location)
        if cur_char == "@":
            # "@Name" is a glyph class reference (returned without the "@").
            self.pos_ += 1
            self.scan_over_(Lexer.CHAR_NAME_CONTINUATION_)
            glyphclass = text[start + 1 : self.pos_]
            if len(glyphclass) < 1:
                raise FeatureLibError("Expected glyph class name", location)
            if not Lexer.RE_GLYPHCLASS.match(glyphclass):
                raise FeatureLibError(
                    "Glyph class names must consist of letters, digits, "
                    "underscore, period or hyphen",
                    location,
                )
            return (Lexer.GLYPHCLASS, glyphclass, location)
        if cur_char in Lexer.CHAR_NAME_START_:
            self.pos_ += 1
            self.scan_over_(Lexer.CHAR_NAME_CONTINUATION_)
            token = text[start : self.pos_]
            if token == "include":
                # Switch modes so the following token is lexed as a file name.
                self.mode_ = Lexer.MODE_FILENAME_
            return (Lexer.NAME, token, location)
        if cur_char == "0" and next_char in "xX":
            self.pos_ += 2
            self.scan_over_(Lexer.CHAR_HEXDIGIT_)
            return (Lexer.HEXADECIMAL, int(text[start : self.pos_], 16), location)
        if cur_char == "0" and next_char in Lexer.CHAR_DIGIT_:
            # A leading zero followed by digits is an octal literal.
            self.scan_over_(Lexer.CHAR_DIGIT_)
            return (Lexer.OCTAL, int(text[start : self.pos_], 8), location)
        if cur_char in Lexer.CHAR_DIGIT_:
            self.scan_over_(Lexer.CHAR_DIGIT_)
            if self.pos_ >= limit or text[self.pos_] != ".":
                return (Lexer.NUMBER, int(text[start : self.pos_], 10), location)
            # Digits followed by "." make a float literal.
            self.scan_over_(".")
            self.scan_over_(Lexer.CHAR_DIGIT_)
            return (Lexer.FLOAT, float(text[start : self.pos_]), location)
        if cur_char == "-" and next_char in Lexer.CHAR_DIGIT_:
            # Negative integer or float literal.
            self.pos_ += 1
            self.scan_over_(Lexer.CHAR_DIGIT_)
            if self.pos_ >= limit or text[self.pos_] != ".":
                return (Lexer.NUMBER, int(text[start : self.pos_], 10), location)
            self.scan_over_(".")
            self.scan_over_(Lexer.CHAR_DIGIT_)
            return (Lexer.FLOAT, float(text[start : self.pos_]), location)
        if cur_char in Lexer.CHAR_SYMBOL_:
            self.pos_ += 1
            return (Lexer.SYMBOL, cur_char, location)
        if cur_char == '"':
            self.pos_ += 1
            self.scan_until_('"')
            if self.pos_ < self.text_length_ and self.text_[self.pos_] == '"':
                self.pos_ += 1
                # strip newlines embedded within a string
                string = re.sub("[\r\n]", "", text[start + 1 : self.pos_ - 1])
                return (Lexer.STRING, string, location)
            else:
                raise FeatureLibError("Expected '\"' to terminate string", location)
        raise FeatureLibError("Unexpected character: %r" % cur_char, location)

    def scan_over_(self, valid):
        """Advance pos_ while the current character is in *valid*."""
        p = self.pos_
        while p < self.text_length_ and self.text_[p] in valid:
            p += 1
        self.pos_ = p

    def scan_until_(self, stop_at):
        """Advance pos_ until a character in *stop_at* (or end of text)."""
        p = self.pos_
        while p < self.text_length_ and self.text_[p] not in stop_at:
            p += 1
        self.pos_ = p

    def scan_anonymous_block(self, tag):
        """Consume an anonymous block body up to its '} tag;' terminator."""
        location = self.location_()
        tag = tag.strip()
        # Skip the rest of the line holding the opening brace.
        self.scan_until_(Lexer.CHAR_NEWLINE_)
        self.scan_over_(Lexer.CHAR_NEWLINE_)
        # NOTE(review): *tag* is interpolated into the regex unescaped;
        # assumes tags never contain regex metacharacters — TODO confirm.
        regexp = r"}\s*" + tag + r"\s*;"
        split = re.split(regexp, self.text_[self.pos_ :], maxsplit=1)
        if len(split) != 2:
            raise FeatureLibError(
                "Expected '} %s;' to terminate anonymous block" % tag, location
            )
        self.pos_ += len(split[0])
        return (Lexer.ANONYMOUS_BLOCK, split[0], location)
class IncludingLexer(object):
    """A Lexer that follows include statements.

    The OpenType feature file specification states that due to
    historical reasons, relative imports should be resolved in this
    order:

    1. If the source font is UFO format, then relative to the UFO's
       font directory
    2. relative to the top-level include file
    3. relative to the parent include file

    We only support 1 (via includeDir) and 2.
    """

    def __init__(self, featurefile, *, includeDir=None):
        """Initializes an IncludingLexer.

        Behavior:
        If includeDir is passed, it will be used to determine the top-level
        include directory to use for all encountered include statements. If it is
        not passed, ``os.path.dirname(featurefile)`` will be considered the
        include directory.
        """

        # Stack of active lexers; the innermost include is at the top (end).
        self.lexers_ = [self.make_lexer_(featurefile)]
        self.featurefilepath = self.lexers_[0].filename_
        self.includeDir = includeDir

    def __iter__(self):
        return self

    def next(self):  # Python 2
        return self.__next__()

    def __next__(self):  # Python 3
        # Drain the innermost lexer first; pop it when exhausted.
        while self.lexers_:
            lexer = self.lexers_[-1]
            try:
                token_type, token, location = next(lexer)
            except StopIteration:
                self.lexers_.pop()
                continue
            if token_type is Lexer.NAME and token == "include":
                # The lexer is in FILENAME mode now: fetch the "(path)" token.
                fname_type, fname_token, fname_location = lexer.next()
                if fname_type is not Lexer.FILENAME:
                    raise FeatureLibError("Expected file name", fname_location)
                # semi_type, semi_token, semi_location = lexer.next()
                # if semi_type is not Lexer.SYMBOL or semi_token != ";":
                #     raise FeatureLibError("Expected ';'", semi_location)
                if os.path.isabs(fname_token):
                    path = fname_token
                else:
                    # Resolve relative includes: explicit includeDir wins,
                    # then the top-level feature file's directory.
                    if self.includeDir is not None:
                        curpath = self.includeDir
                    elif self.featurefilepath is not None:
                        curpath = os.path.dirname(self.featurefilepath)
                    else:
                        # if the IncludingLexer was initialized from an in-memory
                        # file-like stream, it doesn't have a 'name' pointing to
                        # its filesystem path, therefore we fall back to using the
                        # current working directory to resolve relative includes
                        curpath = os.getcwd()
                    path = os.path.join(curpath, fname_token)
                # Guard against include cycles / runaway nesting.
                if len(self.lexers_) >= 5:
                    raise FeatureLibError("Too many recursive includes", fname_location)
                try:
                    self.lexers_.append(self.make_lexer_(path))
                except FileNotFoundError as err:
                    raise IncludedFeaNotFound(fname_token, fname_location) from err
            else:
                return (token_type, token, location)
        raise StopIteration()

    @staticmethod
    def make_lexer_(file_or_path):
        """Build a Lexer from a path or an open file-like object.

        Files opened here are closed again after reading; caller-supplied
        file objects are left open.
        """
        if hasattr(file_or_path, "read"):
            fileobj, closing = file_or_path, False
        else:
            filename, closing = file_or_path, True
            fileobj = open(filename, "r", encoding="utf-8-sig")
        data = fileobj.read()
        # File-like objects without a 'name' attribute yield filename=None.
        filename = getattr(fileobj, "name", None)
        if closing:
            fileobj.close()
        return Lexer(data, filename)

    def scan_anonymous_block(self, tag):
        # Delegate to the innermost (currently active) lexer.
        return self.lexers_[-1].scan_anonymous_block(tag)
||||
class NonIncludingLexer(IncludingLexer):
    """Lexer that does not follow `include` statements, emits them as-is."""

    def __next__(self):  # Python 3
        # Only the top-level lexer is ever consulted; include tokens
        # therefore pass through untouched.
        toplevel = self.lexers_[0]
        return next(toplevel)
|
|
@ -0,0 +1,12 @@
|
|||
from typing import NamedTuple
|
||||
|
||||
|
||||
class FeatureLibLocation(NamedTuple):
    """A (file, line, column) position in a feature file.

    Renders as ``file:line:column``, the conventional compiler style.
    """

    file: str
    line: int
    column: int

    def __str__(self):
        return "{}:{}:{}".format(self.file, self.line, self.column)
|
|
@ -0,0 +1,12 @@
|
|||
from typing import NamedTuple
|
||||
|
||||
# NOTE(review): the names suggest these identify feaLib's lookup debug
# records inside a font and the env var that enables emitting them; the
# actual usage is outside this file — confirm at the call sites.
LOOKUP_DEBUG_INFO_KEY = "com.github.fonttools.feaLib"
LOOKUP_DEBUG_ENV_VAR = "FONTTOOLS_LOOKUP_DEBUGGING"


class LookupDebugInfo(NamedTuple):
    """Information about where a lookup came from, to be embedded in a font"""

    location: str  # source location of the lookup definition
    name: str  # lookup name
    feature: list  # assumed: features referencing this lookup — TODO confirm
2389
venv/lib/python3.13/site-packages/fontTools/feaLib/parser.py
Normal file
2389
venv/lib/python3.13/site-packages/fontTools/feaLib/parser.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,118 @@
|
|||
from fontTools.varLib.models import VariationModel, normalizeValue, piecewiseLinearMap
|
||||
|
||||
|
||||
def Location(loc):
    """Canonicalize a {axis: coordinate} mapping into a hashable key.

    Returns the (axis, coordinate) pairs as a tuple sorted by axis tag,
    so equal locations always produce equal keys.
    """
    pairs = sorted(loc.items())
    return tuple(pairs)
|
||||
|
||||
class VariableScalar:
    """A scalar with different values at different points in the designspace.

    Master values are stored in ``self.values`` keyed by ``Location(...)``
    tuples (sorted (axis, coordinate) pairs).  ``self.axes`` must be
    populated with axis objects exposing ``axisTag``, ``minValue``,
    ``defaultValue`` and ``maxValue`` before any interpolation-related
    method is used.
    """

    def __init__(self, location_value=None):
        """Initialize, optionally from a ``{location: value}`` mapping.

        FIX: the default was a mutable ``{}``; a shared mutable default is
        a classic pitfall, so it is now ``None`` (backward-compatible).
        """
        self.values = {}
        self.axes = {}
        if location_value:
            for location, value in location_value.items():
                self.add_value(location, value)

    def __repr__(self):
        items = []
        for location, value in self.values.items():
            # Render integral coordinates without a trailing ".0".
            loc = ",".join(
                [
                    f"{ax}={int(coord) if float(coord).is_integer() else coord}"
                    for ax, coord in location
                ]
            )
            items.append("%s:%i" % (loc, value))
        return "(" + (" ".join(items)) + ")"

    @property
    def does_vary(self):
        """True if the stored master values are not all equal."""
        values = list(self.values.values())
        return any(v != values[0] for v in values[1:])

    @property
    def axes_dict(self):
        """Map axis tag -> axis object; raises if axes were never set."""
        if not self.axes:
            raise ValueError(
                ".axes must be defined on variable scalar before interpolating"
            )
        return {ax.axisTag: ax for ax in self.axes}

    def _normalized_location(self, location):
        """Return *location* (default-filled) normalized via normalizeValue
        against each axis's (min, default, max), as a Location key."""
        location = self.fix_location(location)
        # Hoist the property lookup: axes_dict rebuilds its dict per access.
        axes_dict = self.axes_dict
        normalized_location = {}
        for axtag in location.keys():
            if axtag not in axes_dict:
                raise ValueError("Unknown axis %s in %s" % (axtag, location))
            axis = axes_dict[axtag]
            normalized_location[axtag] = normalizeValue(
                location[axtag], (axis.minValue, axis.defaultValue, axis.maxValue)
            )

        return Location(normalized_location)

    def fix_location(self, location):
        """Return a copy of *location* with missing axes set to defaults."""
        location = dict(location)
        for tag, axis in self.axes_dict.items():
            if tag not in location:
                location[tag] = axis.defaultValue
        return location

    def add_value(self, location, value):
        """Record *value* at *location* (a ``{tag: coordinate}`` mapping).

        If axes are already known, the location is default-filled first.
        """
        if self.axes:
            location = self.fix_location(location)

        self.values[Location(location)] = value

    def fix_all_locations(self):
        """Re-key every stored value with a default-filled location."""
        self.values = {
            Location(self.fix_location(loc)): val for loc, val in self.values.items()
        }

    @property
    def default(self):
        """The master value at the all-defaults location.

        Raises ValueError when no master sits exactly at the default.
        """
        self.fix_all_locations()
        key = Location({ax.axisTag: ax.defaultValue for ax in self.axes})
        if key not in self.values:
            raise ValueError("Default value could not be found")
        # I *guess* we could interpolate one, but I don't know how.
        return self.values[key]

    def value_at_location(self, location, model_cache=None, avar=None):
        """Return the value at *location*, interpolating between masters
        when the location is not itself a master."""
        loc = Location(location)
        if loc in self.values.keys():
            return self.values[loc]
        values = list(self.values.values())
        loc = dict(self._normalized_location(loc))
        return self.model(model_cache, avar).interpolateFromMasters(loc, values)

    def model(self, model_cache=None, avar=None):
        """Build a VariationModel over the master locations.

        When *model_cache* (a dict) is given, models are memoized by the
        tuple of master locations.  When *avar* is given, normalized
        coordinates are remapped through its per-axis segments.
        """
        if model_cache is not None:
            key = tuple(self.values.keys())
            if key in model_cache:
                return model_cache[key]
        locations = [dict(self._normalized_location(k)) for k in self.values.keys()]
        if avar is not None:
            mapping = avar.segments
            locations = [
                {
                    k: piecewiseLinearMap(v, mapping[k]) if k in mapping else v
                    for k, v in location.items()
                }
                for location in locations
            ]
        m = VariationModel(locations)
        if model_cache is not None:
            model_cache[key] = m
        return m

    def get_deltas_and_supports(self, model_cache=None, avar=None):
        """Return the model's (deltas, supports) for the master values."""
        values = list(self.values.values())
        return self.model(model_cache, avar).getDeltasAndSupports(values)

    def add_to_variation_store(self, store_builder, model_cache=None, avar=None):
        """Store this scalar's deltas in *store_builder* (an object exposing
        ``setSupports``/``storeDeltas``); return ``(default, index)``."""
        deltas, supports = self.get_deltas_and_supports(model_cache, avar)
        store_builder.setSupports(supports)
        index = store_builder.storeDeltas(deltas)
        return int(self.default), index
Loading…
Add table
Add a link
Reference in a new issue