From fb4279293472b408eaa7ee403927a2717e8aa991 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Thu, 13 Feb 2020 10:10:36 +0100
Subject: [PATCH 01/63] Update README.md
---
README.md | 6 ------
1 file changed, 6 deletions(-)
diff --git a/README.md b/README.md
index ae42214..4cd22d1 100644
--- a/README.md
+++ b/README.md
@@ -62,12 +62,6 @@ Things I will work on next:
**[ ]** Approximate Nearest Neighbor Search for SentenceVectors
-**[ ]** Discrete Cosine Transform Embedding (?)
-
-**[ ]** VLAVE Embedding (?)
-
-**[ ]** PowerMeans Embedding (?)
-
Installation
From fe9af44ec887fad92ee05b07273907b154889957 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Thu, 13 Feb 2020 18:47:52 +0100
Subject: [PATCH 02/63] Code style fix
---
README.md | 1 +
fse/models/utils.py | 3 ++-
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 4cd22d1..aff4a04 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,7 @@
+
diff --git a/fse/models/utils.py b/fse/models/utils.py
index 4567546..190f97f 100644
--- a/fse/models/utils.py
+++ b/fse/models/utils.py
@@ -76,9 +76,10 @@ def compute_principal_components(
current_mem = INF
sample_size = len(vectors)
- while 1:
+ while current_mem >= cache_size_gb:
current_mem = sample_size * vectors.shape[1] * dtype(REAL).itemsize / 1024 ** 3
if current_mem < cache_size_gb:
+ # Stop subsampling once the (sub)sampled data already fits into cache_size_gb
break
sample_size *= 0.995
sample_size = int(sample_size)
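The loop patched above shrinks the sample by 0.5% per iteration until the estimated float32 footprint fits into cache_size_gb. A minimal, self-contained sketch of that logic (subsample_for_cache is a hypothetical name; the real compute_principal_components goes on to compute the principal components on the resulting sample):

import numpy as np

def subsample_for_cache(vectors: np.ndarray, cache_size_gb: float = 1.0) -> np.ndarray:
    # Shrink the sample size by 0.5% per iteration until a float32 copy
    # of the (sub)sample fits into cache_size_gb.
    sample_size = len(vectors)
    while True:
        current_mem = sample_size * vectors.shape[1] * np.dtype(np.float32).itemsize / 1024 ** 3
        if current_mem < cache_size_gb:
            break
        sample_size = int(sample_size * 0.995)
    if sample_size == len(vectors):
        return vectors
    rows = np.random.choice(len(vectors), size=sample_size, replace=False)
    return vectors[rows]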
From db4c0213aa423559847b9341679db17ad201e050 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Fri, 14 Feb 2020 16:40:29 +0100
Subject: [PATCH 03/63] Added file closing op
---
fse/test/test_average.py | 3 ++-
fse/test/test_base_s2v.py | 3 ++-
fse/test/test_sentencevectors.py | 3 ++-
fse/test/test_sif.py | 3 ++-
fse/test/test_usif.py | 3 ++-
5 files changed, 10 insertions(+), 5 deletions(-)
diff --git a/fse/test/test_average.py b/fse/test/test_average.py
index 35e8e9a..983ab86 100644
--- a/fse/test/test_average.py
+++ b/fse/test/test_average.py
@@ -25,7 +25,8 @@
CORPUS = Path("fse/test/test_data/test_sentences.txt")
DIM = 5
W2V = Word2Vec(min_count=1, size=DIM)
-SENTENCES = [l.split() for i, l in enumerate(open(CORPUS, "r"))]
+with open(CORPUS, "r") as f:
+ SENTENCES = [l.split() for i, l in enumerate(f)]
W2V.build_vocab(SENTENCES)
W2V.wv.vectors[:,] = np.arange(len(W2V.wv.vectors), dtype=np.float32)[:, None]
diff --git a/fse/test/test_base_s2v.py b/fse/test/test_base_s2v.py
index d9265ff..4193b30 100644
--- a/fse/test/test_base_s2v.py
+++ b/fse/test/test_base_s2v.py
@@ -27,7 +27,8 @@
CORPUS = Path("fse/test/test_data/test_sentences.txt")
DIM = 5
W2V = Word2Vec(min_count=1, size=DIM)
-SENTENCES = [l.split() for i, l in enumerate(open(CORPUS, "r"))]
+with open(CORPUS, "r") as f:
+ SENTENCES = [l.split() for i, l in enumerate(f)]
W2V.build_vocab(SENTENCES)
diff --git a/fse/test/test_sentencevectors.py b/fse/test/test_sentencevectors.py
index 00b7ba4..1fb18ec 100644
--- a/fse/test/test_sentencevectors.py
+++ b/fse/test/test_sentencevectors.py
@@ -26,7 +26,8 @@
CORPUS = Path("fse/test/test_data/test_sentences.txt")
DIM = 5
W2V = Word2Vec(min_count=1, size=DIM, seed=42)
-SENTENCES = [l.split() for l in open(CORPUS, "r")]
+with open(CORPUS, "r") as f:
+ SENTENCES = [l.split() for i, l in enumerate(f)]
W2V.build_vocab(SENTENCES)
np.random.seed(42)
W2V.wv.vectors = np.random.uniform(size=W2V.wv.vectors.shape).astype(np.float32)
diff --git a/fse/test/test_sif.py b/fse/test/test_sif.py
index a64f13f..d784b64 100644
--- a/fse/test/test_sif.py
+++ b/fse/test/test_sif.py
@@ -19,7 +19,8 @@
CORPUS = Path("fse/test/test_data/test_sentences.txt")
DIM = 50
W2V = Word2Vec(min_count=1, size=DIM)
-SENTENCES = [l.split() for l in open(CORPUS, "r")]
+with open(CORPUS, "r") as f:
+ SENTENCES = [l.split() for i, l in enumerate(f)]
W2V.build_vocab(SENTENCES)
diff --git a/fse/test/test_usif.py b/fse/test/test_usif.py
index e188b60..fe5320f 100644
--- a/fse/test/test_usif.py
+++ b/fse/test/test_usif.py
@@ -15,7 +15,8 @@
CORPUS = Path("fse/test/test_data/test_sentences.txt")
DIM = 50
W2V = Word2Vec(min_count=1, size=DIM)
-SENTENCES = [l.split() for l in open(CORPUS, "r")]
+with open(CORPUS, "r") as f:
+ SENTENCES = [l.split() for i, l in enumerate(f)]
W2V.build_vocab(SENTENCES)
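The same pattern is applied in every test module above; in isolation it looks as follows (the enumerate index is unused in most of the files and can be dropped):

from pathlib import Path

CORPUS = Path("fse/test/test_data/test_sentences.txt")

# The context manager closes the file handle as soon as the block exits,
# which the previous bare open() call never did.
with open(CORPUS, "r") as f:
    SENTENCES = [line.split() for line in f]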
From 296621a81645b9e063d0abcfb37c7b0a5ea0d326 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Fri, 14 Feb 2020 18:23:29 +0100
Subject: [PATCH 04/63] Fixed lgtm test exclusion
---
.lgtm.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.lgtm.yml b/.lgtm.yml
index a1b5814..9f206b4 100644
--- a/.lgtm.yml
+++ b/.lgtm.yml
@@ -1,6 +1,6 @@
path_classifiers:
test:
- - exclude: "**/test_*"
+ - test
extraction:
python:
From 48333b8d59ed02b24d2f0b5c181b36f026fe1fc7 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Fri, 14 Feb 2020 18:25:19 +0100
Subject: [PATCH 05/63] Fixed call to child in super __init__
---
fse/inputs.py | 18 +++++++++---------
fse/test/test_inputs.py | 24 ++++++++++++++++--------
2 files changed, 25 insertions(+), 17 deletions(-)
diff --git a/fse/inputs.py b/fse/inputs.py
index 5671c0e..d69d397 100644
--- a/fse/inputs.py
+++ b/fse/inputs.py
@@ -150,6 +150,9 @@ def __init__(self, *args:[list, set, ndarray], custom_index:[list, ndarray]):
"""
self.custom_index = custom_index
+ if len(args) > 1:
+ RuntimeError("Argument merging not supported")
+
super(CIndexedList, self).__init__(*args)
if len(self.items) != len(self.custom_index):
@@ -176,9 +179,6 @@ def insert(self, i:int, item:str):
def append(self, item:str):
raise NotImplementedError("Method currently not supported")
-
- def extend(self, arg:[list, set, ndarray]):
- raise NotImplementedError("Method currently not supported")
class SplitIndexedList(BaseIndexedList):
@@ -220,6 +220,9 @@ def __init__(self, *args:[list, set, ndarray], custom_index:[list, ndarray]):
"""
self.custom_index = custom_index
+ if len(args) > 1:
+ RuntimeError("Argument merging not supported")
+
super(SplitCIndexedList, self).__init__(*args)
if len(self.items) != len(self.custom_index):
@@ -248,9 +251,6 @@ def insert(self, i:int, item:str):
def append(self, item:str):
raise NotImplementedError("Method currently not supported")
- def extend(self, arg:[list, set, ndarray]):
- raise NotImplementedError("Method currently not supported")
-
class CSplitIndexedList(BaseIndexedList):
def __init__(self, *args:[list, set, ndarray], custom_split:callable):
@@ -296,6 +296,9 @@ def __init__(self, *args:[list, set, ndarray], custom_split:callable, custom_ind
"""
self.custom_split = custom_split
self.custom_index = custom_index
+
+ if len(args) > 1:
+ RuntimeError("Argument merging not supported")
super(CSplitCIndexedList, self).__init__(*args)
@@ -323,9 +326,6 @@ def insert(self, i:int, item:str):
def append(self, item:str):
raise NotImplementedError("Method currently not supported")
-
- def extend(self, arg:[list, set, ndarray]):
- raise NotImplementedError("Method currently not supported")
class IndexedLineDocument(object):
diff --git a/fse/test/test_inputs.py b/fse/test/test_inputs.py
index 259806c..97bef70 100644
--- a/fse/test/test_inputs.py
+++ b/fse/test/test_inputs.py
@@ -134,8 +134,10 @@ def test_mutable_funcs(self):
self.il.insert(0, "the")
with self.assertRaises(NotImplementedError):
self.il.append("the")
- with self.assertRaises(NotImplementedError):
- self.il.extend(["the", "dog"])
+
+ def test_arg_merging(self):
+ with self.assertRaises(RuntimeError):
+ CIndexedList(self.list_a, self.list_a, custom_index=[1, 1])
class TestCSplitIndexedList(unittest.TestCase):
@@ -168,8 +170,10 @@ def test_mutable_funcs(self):
self.il.insert(0, "the")
with self.assertRaises(NotImplementedError):
self.il.append("the")
- with self.assertRaises(NotImplementedError):
- self.il.extend(["the", "dog"])
+
+ def test_arg_merging(self):
+ with self.assertRaises(RuntimeError):
+ SplitCIndexedList(self.list_a, self.list_a, custom_index=[1, 1])
class TestCSplitCIndexedList(unittest.TestCase):
@@ -195,9 +199,13 @@ def test_mutable_funcs(self):
self.il.insert(0, "the")
with self.assertRaises(NotImplementedError):
self.il.append("the")
- with self.assertRaises(NotImplementedError):
- self.il.extend(["the", "dog"])
-
+
+ def test_arg_merging(self):
+ with self.assertRaises(RuntimeError):
+ CSplitCIndexedList(self.list_a, self.list_a,
+ custom_split=self.split_func,
+ custom_index=[1, 1]
+ )
class TestIndexedLineDocument(unittest.TestCase):
def setUp(self):
@@ -225,4 +233,4 @@ def test_yield(self):
logging.basicConfig(
format="%(asctime)s : %(levelname)s : %(message)s", level=logging.DEBUG
)
- unittest.main()
+ unittest.main()
\ No newline at end of file
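A short usage sketch of the guard added above, mirroring the new unit tests (toy data; assumes __getitem__ returns (sentence, custom_index) tuples and that the guard actually raises):

from fse.inputs import CIndexedList

list_a = [["the", "dog", "barks"]]

# One argument list plus a matching custom index is accepted.
il = CIndexedList(list_a, custom_index=[42])
sentence, index = il[0]  # the custom index is returned alongside the sentence

# Several argument lists next to a single custom_index are ambiguous
# and therefore rejected.
try:
    CIndexedList(list_a, list_a, custom_index=[1, 1])
except RuntimeError:
    pass  # "Argument merging not supported"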
From 8c21256447170e33a840b1bd414a3f52c9653d38 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 10:31:54 +0100
Subject: [PATCH 06/63] Added Pooling model
---
fse/models/pooling.py | 351 +++++++++++++++++++++++++++++++++++++++
fse/test/test_pooling.py | 326 ++++++++++++++++++++++++++++++++++++
2 files changed, 677 insertions(+)
create mode 100644 fse/models/pooling.py
create mode 100644 fse/test/test_pooling.py
diff --git a/fse/models/pooling.py b/fse/models/pooling.py
new file mode 100644
index 0000000..2f5c009
--- /dev/null
+++ b/fse/models/pooling.py
@@ -0,0 +1,351 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Author: Oliver Borchers
+# Copyright (C) 2019 Oliver Borchers
+
+"""This module implements the base class to compute Max Pooling representations for sentences, using highly optimized C routines,
+data streaming and Pythonic interfaces.
+
+The implementation is based on Shen et al. (2018): Baseline Needs More Love: On Simple Word-Embedding-Based Models and Associated Pooling Mechanisms.
+For more information, see .
+
+The training algorithm is based on the Gensim implementation of Word2Vec, FastText, and Doc2Vec.
+For more information, see: :class:`~gensim.models.word2vec.Word2Vec`, :class:`~gensim.models.fasttext.FastText`, or
+:class:`~gensim.models.doc2vec.Doc2Vec`.
+
+Initialize and train a :class:`~fse.models.pooling.MaxPooling` model
+
+.. sourcecode:: pycon
+
+ >>> from gensim.models.word2vec import Word2Vec
+ >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]]
+ >>> model = Word2Vec(sentences, min_count=1, size=20)
+
+ >>> from fse.models.pooling import MaxPooling
+ >>> avg = MaxPooling(model)
+ >>> avg.train([(s, i) for i, s in enumerate(sentences)])
+ >>> avg.sv.vectors.shape
+ (2, 20)
+
+"""
+
+from __future__ import division
+
+from fse.models.base_s2v import BaseSentence2VecModel
+
+from gensim.models.keyedvectors import BaseKeyedVectors
+from gensim.models.utils_any2vec import ft_ngram_hashes
+
+from numpy import (
+ ndarray,
+ float32 as REAL,
+ sum as np_sum,
+ multiply as np_mult,
+ zeros,
+ amax as np_amax,
+ maximum as np_maximum,
+)
+
+from typing import List
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def train_pooling_np(
+ model: BaseSentence2VecModel,
+ indexed_sentences: List[tuple],
+ target: ndarray,
+ memory: ndarray,
+) -> [int, int]:
+ """Training on a sequence of sentences and update the target ndarray.
+
+ Called internally from :meth:`~fse.models.average.Average._do_train_job`.
+
+ Warnings
+ --------
+ This is the non-optimized, pure Python version. If you have a C compiler,
+ fse will use an optimized code path from :mod:`fse.models.average_inner` instead.
+
+ Parameters
+ ----------
+ model : :class:`~fse.models.base_s2v.BaseSentence2VecModel`
+ The BaseSentence2VecModel model instance.
+ indexed_sentences : iterable of tuple
+ The sentences used to train the model.
+ target : ndarray
+ The target ndarray. We use the index from indexed_sentences
+ to write into the corresponding row of target.
+ memory : ndarray
+ Private memory for each working thread
+
+ Returns
+ -------
+ int, int
+ Number of effective sentences (non-zero) and effective words in the vocabulary used
+ during training the sentence embedding.
+
+ """
+ size = model.wv.vector_size
+ vocab = model.wv.vocab
+
+ w_vectors = model.wv.vectors
+ w_weights = model.word_weights
+
+ s_vectors = target
+
+ is_ft = model.is_ft
+
+ mem = memory[0]
+
+ hierarchical = model.hierarchical
+ window = model.window_size
+
+ if is_ft:
+ # NOTE: For Fasttext: Use wv.vectors_vocab
+ # Using the wv.vectors from fasttext had horrible effects on the sts results
+ # I suspect this is because the wv.vectors are based on the averages of
+ # wv.vectors_vocab + wv.vectors_ngrams, which will all point into very
+ # similar directions.
+ max_ngrams = model.batch_ngrams
+ w_vectors = model.wv.vectors_vocab
+ ngram_vectors = model.wv.vectors_ngrams
+ min_n = model.wv.min_n
+ max_n = model.wv.max_n
+ bucket = model.wv.bucket
+ oov_weight = np_amax(w_weights)
+
+ def get_ft_vector(word:str) -> ndarray:
+ if word in vocab:
+ vocab_index = vocab[word].index
+ return w_vectors[vocab_index] * w_weights[vocab_index]
+ else:
+ ngram_hashes = ft_ngram_hashes(
+ word, min_n, max_n, bucket, True
+ )[:max_ngrams]
+ if len(ngram_hashes) == 0:
+ return zeros(size, dtype=REAL)
+ return (
+ oov_weight *
+ np_sum(ngram_vectors[ngram_hashes], axis=0)
+ / len(ngram_hashes)
+ )
+
+ eff_sentences, eff_words = 0, 0
+
+ if not is_ft:
+ for obj in indexed_sentences:
+ mem.fill(0.0)
+ sent = obj[0]
+ sent_adr = obj[1]
+
+ word_indices = [vocab[word].index for word in sent if word in vocab]
+ eff_sentences += 1
+ if not len(word_indices):
+ continue
+ eff_words += len(word_indices)
+
+ if not hierarchical:
+ # Take the maximum value along the axis
+ s_vectors[sent_adr] = np_amax(
+ np_mult(w_vectors[word_indices], w_weights[word_indices][:, None]),
+ axis=0,
+ )
+ else:
+ # More expensive iteration
+ for word_index, _ in enumerate(word_indices):
+ # Compute the local window
+ window_indices = word_indices[word_index : word_index + window]
+ # Perform average pooling [0,1,2,3,4]
+ mem = np_sum(
+ np_mult(
+ w_vectors[window_indices],
+ w_weights[window_indices][:, None],
+ ),
+ axis=0,
+ )
+ # Perform hierarchical max pooling
+ mem *= 1 / len(window_indices)
+ s_vectors[sent_adr] = np_maximum(s_vectors[sent_adr], mem,)
+ else:
+ for obj in indexed_sentences:
+ mem.fill(0.0)
+ sent = obj[0]
+ sent_adr = obj[1]
+
+ if not len(sent):
+ continue
+ mem = zeros(size, dtype=REAL)
+
+ eff_sentences += 1
+ eff_words += len(sent) # Counts everything in the sentence
+
+ if not hierarchical:
+ for word in sent:
+ if word in vocab:
+ vocab_index = vocab[word].index
+ s_vectors[sent_adr] = np_maximum(
+ get_ft_vector(word),
+ s_vectors[sent_adr],
+ )
+ else:
+ ngram_hashes = ft_ngram_hashes(
+ word, min_n, max_n, bucket, True
+ )[:max_ngrams]
+ if len(ngram_hashes) == 0:
+ continue
+ mem = oov_weight * (
+ np_sum(ngram_vectors[ngram_hashes], axis=0)
+ / len(ngram_hashes)
+ )
+
+ s_vectors[sent_adr] = np_maximum(
+ mem,
+ s_vectors[sent_adr],
+ )
+ else:
+ # Expensive iteration
+ for word_index, word in enumerate(sent):
+ mem.fill(0.0)
+ for context in sent[word_index : word_index + window]:
+ if word == context:
+ continue
+ print(context)
+
+
+
+ return eff_sentences, eff_words
+
+
+# try:
+# from fse.models.average_inner import train_average_cy
+# from fse.models.average_inner import (
+# FAST_VERSION,
+# MAX_WORDS_IN_BATCH,
+# MAX_NGRAMS_IN_BATCH,
+# )
+
+# train_average = train_average_cy
+# except ImportError:
+
+FAST_VERSION = -1
+MAX_WORDS_IN_BATCH = 10000
+MAX_NGRAMS_IN_BATCH = 40
+train_pooling = train_pooling_np
+
+
+class MaxPooling(BaseSentence2VecModel):
+ """ Train, use and evaluate max pooling sentence vectors.
+
+ The model can be stored/loaded via its :meth:`~fse.models.pooling.MaxPooling.save` and
+ :meth:`~fse.models.pooling.MaxPooling.load` methods.
+
+ Some important attributes are the following:
+
+ Attributes
+ ----------
+ wv : :class:`~gensim.models.keyedvectors.BaseKeyedVectors`
+ This object essentially contains the mapping between words and embeddings. After training, it can be used
+ directly to query those embeddings in various ways. See the module level docstring for examples.
+
+ sv : :class:`~fse.models.sentencevectors.SentenceVectors`
+ This object contains the sentence vectors inferred from the training data. There will be one such vector
+ for each unique sentence supplied during training. They may be individually accessed using the index.
+
+ prep : :class:`~fse.models.base_s2v.BaseSentence2VecPreparer`
+ The prep object is used to transform and initialize the sv.vectors. Additionally, it can be used
+ to move the vectors to disk for training with memmap.
+
+ """
+
+ def __init__(
+ self,
+ model: BaseKeyedVectors,
+ hierarchical: bool = False,
+ window_size: int = 5,
+ sv_mapfile_path: str = None,
+ wv_mapfile_path: str = None,
+ workers: int = 1,
+ lang_freq: str = None,
+ **kwargs
+ ):
+ """ Max pooling sentence embeddings model. Performs a simple maximum pooling operation over all
+ words in a sentence without further transformations.
+
+ The implementation is based on Shen et al. (2018): Baseline Needs More Love: On Simple Word-Embedding-Based Models and Associated Pooling Mechanisms.
+ For more information, see .
+
+ Parameters
+ ----------
+ model : :class:`~gensim.models.keyedvectors.BaseKeyedVectors` or :class:`~gensim.models.base_any2vec.BaseWordEmbeddingsModel`
+ This object essentially contains the mapping between words and embeddings. To compute the sentence embeddings
+ the wv.vocab and wv.vector elements are required.
+ hierarchical : bool
+ If True, perform a hierarchical pooling operation
+ window_size : int
+ Size of the window used for the hierarchical pooling operation
+ sv_mapfile_path : str, optional
+ Optional path to store the sentence-vectors in for very large datasets. Used for memmap.
+ wv_mapfile_path : str, optional
+ Optional path to store the word-vectors in for very large datasets. Used for memmap.
+ Use sv_mapfile_path and wv_mapfile_path to train disk-to-disk without needing much ram.
+ workers : int, optional
+ Number of working threads, used for multithreading. For most tasks (few words in a sentence)
+ a value of 1 should be more than enough.
+ lang_freq : str, optional
+ Some pre-trained embeddings, i.e. "GoogleNews-vectors-negative300.bin", do not contain information about
+ the frequency of a word. As the frequency is required for estimating the word weights, we induce
+ frequencies into the wv.vocab.count based on :class:`~wordfreq`
+ If no frequency information is available, you can choose the language to estimate the frequency.
+ See https://github.com/LuminosoInsight/wordfreq
+
+ """
+ self.hierarchical = bool(hierarchical)
+ self.window_size = int(window_size)
+
+ super(MaxPooling, self).__init__(
+ model=model,
+ sv_mapfile_path=sv_mapfile_path,
+ wv_mapfile_path=wv_mapfile_path,
+ workers=workers,
+ lang_freq=lang_freq,
+ batch_words=MAX_WORDS_IN_BATCH,
+ batch_ngrams=MAX_NGRAMS_IN_BATCH,
+ fast_version=FAST_VERSION,
+ )
+
+ def _do_train_job(
+ self, data_iterable: List[tuple], target: ndarray, memory: ndarray
+ ) -> [int, int]:
+ """ Internal routine which is called on training and performs averaging for all entries in the iterable """
+ eff_sentences, eff_words = train_pooling(
+ model=self, indexed_sentences=data_iterable, target=target, memory=memory,
+ )
+ return eff_sentences, eff_words
+
+ def _check_parameter_sanity(self, **kwargs):
+ """ Check the sanity of all child paramters """
+ if not all(self.word_weights == 1.0):
+ raise ValueError("All word weights must equal one for pool")
+ if self.window_size < 1:
+ raise ValueError("Window size must be greater than 1")
+
+ def _pre_train_calls(self, **kwargs):
+ """Function calls to perform before training """
+ pass
+
+ def _post_train_calls(self, **kwargs):
+ """ Function calls to perform after training, such as computing eigenvectors """
+ pass
+
+ def _post_inference_calls(self, **kwargs):
+ """ Function calls to perform after training & inference
+ Examples include the removal of components
+ """
+ pass
+
+ def _check_dtype_santiy(self, **kwargs):
+ """ Check the dtypes of all child attributes"""
+ pass
diff --git a/fse/test/test_pooling.py b/fse/test/test_pooling.py
new file mode 100644
index 0000000..d6c8ab6
--- /dev/null
+++ b/fse/test/test_pooling.py
@@ -0,0 +1,326 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Author: Oliver Borchers
+# Copyright (C) 2019 Oliver Borchers
+
+"""
+Automated tests for checking the average model.
+"""
+
+import logging
+import unittest
+
+from pathlib import Path
+
+import numpy as np
+
+from fse.models.pooling import MaxPooling, train_pooling_np
+from fse.models.base_s2v import EPS
+
+from gensim.models import Word2Vec, FastText
+
+logger = logging.getLogger(__name__)
+
+CORPUS = Path("fse/test/test_data/test_sentences.txt")
+DIM = 5
+W2V = Word2Vec(min_count=1, size=DIM)
+with open(CORPUS, "r") as f:
+ SENTENCES = [l.split() for i, l in enumerate(f)]
+W2V.build_vocab(SENTENCES)
+W2V.wv.vectors[:,] = np.arange(len(W2V.wv.vectors), dtype=np.float32)[:, None]
+
+
+class TestAverageFunctions(unittest.TestCase):
+ def setUp(self):
+ self.sentences = [
+ ["They", "admit"],
+ ["So", "Apple", "bought", "buds"],
+ ["go", "12345"],
+ ["pull", "12345678910111213"],
+ "this is a longer test sentence test longer sentences".split()
+ ]
+ self.sentences = [(s, i) for i, s in enumerate(self.sentences)]
+ self.model = MaxPooling(W2V)
+ self.model.prep.prepare_vectors(
+ sv=self.model.sv, total_sentences=len(self.sentences), update=False
+ )
+ self.model._pre_train_calls()
+
+ # def test_cython(self):
+ # from fse.models.average_inner import (
+ # FAST_VERSION,
+ # MAX_WORDS_IN_BATCH,
+ # MAX_NGRAMS_IN_BATCH,
+ # )
+
+ # self.assertTrue(FAST_VERSION)
+ # self.assertEqual(10000, MAX_WORDS_IN_BATCH)
+ # self.assertEqual(40, MAX_NGRAMS_IN_BATCH)
+
+ # def test_average_train_cy_w2v(self):
+ # self.model.sv.vectors = np.zeros_like(self.model.sv.vectors, dtype=np.float32)
+ # mem = self.model._get_thread_working_mem()
+
+ # from fse.models.average_inner import train_average_cy
+
+ # output = train_average_cy(
+ # self.model, self.sentences, self.model.sv.vectors, mem
+ # )
+ # self.assertEqual((4, 7), output)
+ # self.assertTrue((183 == self.model.sv[0]).all())
+ # self.assertTrue((164.5 == self.model.sv[1]).all())
+ # self.assertTrue((self.model.wv.vocab["go"].index == self.model.sv[2]).all())
+
+ # def test_average_train_cy_ft(self):
+ # ft = FastText(min_count=1, size=DIM)
+ # ft.build_vocab(SENTENCES)
+ # m = Average(ft)
+ # m.prep.prepare_vectors(
+ # sv=m.sv, total_sentences=len(self.sentences), update=False
+ # )
+ # m._pre_train_calls()
+ # m.wv.vectors = m.wv.vectors_vocab = np.ones_like(m.wv.vectors, dtype=np.float32)
+ # m.wv.vectors_ngrams = np.full_like(m.wv.vectors_ngrams, 2, dtype=np.float32)
+ # mem = m._get_thread_working_mem()
+
+ # from fse.models.average_inner import train_average_cy
+
+ # output = train_average_cy(m, self.sentences, m.sv.vectors, mem)
+ # self.assertEqual((4, 10), output)
+ # self.assertTrue((1.0 + EPS == m.sv[0]).all())
+ # self.assertTrue(np.allclose(1.5, m.sv[2]))
+ # self.assertTrue(np.allclose(2, m.sv[3]))
+
+ # def test_cy_equal_np_w2v(self):
+ # m1 = Average(W2V)
+ # m1.prep.prepare_vectors(
+ # sv=m1.sv, total_sentences=len(self.sentences), update=False
+ # )
+ # m1._pre_train_calls()
+ # mem1 = m1._get_thread_working_mem()
+ # o1 = train_average_np(m1, self.sentences, m1.sv.vectors, mem1)
+
+ # m2 = Average(W2V)
+ # m2.prep.prepare_vectors(
+ # sv=m2.sv, total_sentences=len(self.sentences), update=False
+ # )
+ # m2._pre_train_calls()
+ # mem2 = m2._get_thread_working_mem()
+
+ # from fse.models.average_inner import train_average_cy
+
+ # o2 = train_average_cy(m2, self.sentences, m2.sv.vectors, mem2)
+
+ # self.assertEqual(o1, o2)
+ # self.assertTrue((m1.sv.vectors == m2.sv.vectors).all())
+
+ # def test_cy_equal_np_w2v_random(self):
+ # w2v = Word2Vec(min_count=1, size=DIM)
+ # # Random initialization
+ # w2v.build_vocab(SENTENCES)
+
+ # m1 = Average(w2v)
+ # m1.prep.prepare_vectors(
+ # sv=m1.sv, total_sentences=len(self.sentences), update=False
+ # )
+ # m1._pre_train_calls()
+ # mem1 = m1._get_thread_working_mem()
+ # o1 = train_average_np(m1, self.sentences, m1.sv.vectors, mem1)
+
+ # m2 = Average(w2v)
+ # m2.prep.prepare_vectors(
+ # sv=m2.sv, total_sentences=len(self.sentences), update=False
+ # )
+ # m2._pre_train_calls()
+ # mem2 = m2._get_thread_working_mem()
+
+ # from fse.models.average_inner import train_average_cy
+
+ # o2 = train_average_cy(m2, self.sentences, m2.sv.vectors, mem2)
+
+ # self.assertTrue(np.allclose(m1.sv.vectors, m2.sv.vectors, atol=1e-6))
+
+ # def test_cy_equal_np_ft_random(self):
+ # ft = FastText(size=20, min_count=1)
+ # ft.build_vocab(SENTENCES)
+
+ # m1 = Average(ft)
+ # m1.prep.prepare_vectors(
+ # sv=m1.sv, total_sentences=len(self.sentences), update=False
+ # )
+ # m1._pre_train_calls()
+
+ # from fse.models.average_inner import MAX_NGRAMS_IN_BATCH
+
+ # m1.batch_ngrams = MAX_NGRAMS_IN_BATCH
+ # mem1 = m1._get_thread_working_mem()
+ # o1 = train_average_np(m1, self.sentences[:2], m1.sv.vectors, mem1)
+
+ # m2 = Average(ft)
+ # m2.prep.prepare_vectors(
+ # sv=m2.sv, total_sentences=len(self.sentences), update=False
+ # )
+ # m2._pre_train_calls()
+ # mem2 = m2._get_thread_working_mem()
+
+ # from fse.models.average_inner import train_average_cy
+
+ # o2 = train_average_cy(m2, self.sentences[:2], m2.sv.vectors, mem2)
+
+ # self.assertEqual(o1, o2)
+ # self.assertTrue(np.allclose(m1.sv.vectors, m2.sv.vectors, atol=1e-6))
+
+ # def test_train_single_from_disk(self):
+ # p = Path("fse/test/test_data/test_vecs")
+ # p_res = Path("fse/test/test_data/test_vecs.vectors")
+ # p_target = Path("fse/test/test_data/test_vecs_wv.vectors")
+
+ # se1 = Average(W2V)
+ # se2 = Average(
+ # W2V, sv_mapfile_path=str(p.absolute()), wv_mapfile_path=str(p.absolute())
+ # )
+ # se1.train([(s, i) for i, s in enumerate(SENTENCES)])
+ # se2.train([(s, i) for i, s in enumerate(SENTENCES)])
+
+ # self.assertTrue(p_target.exists())
+ # self.assertTrue((se1.wv.vectors == se2.wv.vectors).all())
+ # self.assertFalse(se2.wv.vectors.flags.writeable)
+
+ # self.assertTrue((se1.sv.vectors == se2.sv.vectors).all())
+ # p_res.unlink()
+ # p_target.unlink()
+
+ # def test_train_multi_from_disk(self):
+ # p = Path("fse/test/test_data/test_vecs")
+ # p_res = Path("fse/test/test_data/test_vecs.vectors")
+ # p_target = Path("fse/test/test_data/test_vecs_wv.vectors")
+
+ # se1 = Average(W2V, workers=2)
+ # se2 = Average(
+ # W2V,
+ # workers=2,
+ # sv_mapfile_path=str(p.absolute()),
+ # wv_mapfile_path=str(p.absolute()),
+ # )
+ # se1.train([(s, i) for i, s in enumerate(SENTENCES)])
+ # se2.train([(s, i) for i, s in enumerate(SENTENCES)])
+
+ # self.assertTrue(p_target.exists())
+ # self.assertTrue((se1.wv.vectors == se2.wv.vectors).all())
+ # self.assertFalse(se2.wv.vectors.flags.writeable)
+
+ # self.assertTrue((se1.sv.vectors == se2.sv.vectors).all())
+ # p_res.unlink()
+ # p_target.unlink()
+
+ def test_check_parameter_sanity(self):
+ se = MaxPooling(W2V)
+ se.word_weights = np.full(20, 2.0, dtype=np.float32)
+ with self.assertRaises(ValueError):
+ se._check_parameter_sanity()
+
+ se = MaxPooling(W2V, window_size=0)
+ with self.assertRaises(ValueError):
+ se._check_parameter_sanity()
+
+ def test_train(self):
+ self.assertEqual(
+ (100, 1450), self.model.train([(s, i) for i, s in enumerate(SENTENCES)])
+ )
+
+ def test_do_train_job(self):
+ self.model.prep.prepare_vectors(
+ sv=self.model.sv, total_sentences=len(SENTENCES), update=True
+ )
+ mem = self.model._get_thread_working_mem()
+ self.assertEqual(
+ (100, 1450),
+ self.model._do_train_job(
+ [(s, i) for i, s in enumerate(SENTENCES)],
+ target=self.model.sv.vectors,
+ memory=mem,
+ ),
+ )
+ self.assertEqual((105, DIM), self.model.sv.vectors.shape)
+
+ def test_pooling_train_np_w2v(self):
+ self.model.sv.vectors = np.zeros_like(self.model.sv.vectors, dtype=np.float32)
+ mem = self.model._get_thread_working_mem()
+
+ output = train_pooling_np(
+ self.model, self.sentences, self.model.sv.vectors, mem
+ )
+
+ self.assertEqual((5, 14), output)
+ self.assertTrue((241 == self.model.sv[0]).all())
+ self.assertTrue((306 == self.model.sv[1]).all())
+ self.assertTrue((self.model.wv.vocab["go"].index == self.model.sv[2]).all())
+
+ def test_hier_pooling_train_np_w2v(self):
+ self.model.sv.vectors = np.zeros_like(self.model.sv.vectors, dtype=np.float32)
+ mem = self.model._get_thread_working_mem()
+
+ self.model.hierarchical = True
+
+ output = train_pooling_np(
+ self.model, self.sentences, self.model.sv.vectors, mem
+ )
+ self.model.hierarchical = False
+
+ self.assertEqual((5, 14), output)
+ self.assertTrue((183 == self.model.sv[0]).all())
+ self.assertTrue(np.allclose(self.model.sv[4], 245.66667))
+
+ def test_pooling_train_np_ft(self):
+ ft = FastText(min_count=1, size=DIM)
+ ft.build_vocab(SENTENCES)
+ m = MaxPooling(ft)
+ m.prep.prepare_vectors(
+ sv=m.sv, total_sentences=len(self.sentences), update=False
+ )
+ m._pre_train_calls()
+
+ m.wv.vectors[:,] = np.arange(len(m.wv.vectors), dtype=np.float32)[:, None]
+ m.wv.vectors_vocab = m.wv.vectors
+
+ m.wv.vectors_ngrams[:,] = np.arange(len(m.wv.vectors_ngrams), dtype=np.float32)[:, None]
+ mem = m._get_thread_working_mem()
+
+ output = train_pooling_np(m, self.sentences, m.sv.vectors, mem)
+
+ self.assertEqual((5, 19), output)
+ self.assertTrue((241 == m.sv[0]).all())
+ self.assertTrue((737413.9 == m.sv[2]).all())
+ self.assertTrue((1080970.2 == m.sv[3]).all())
+
+ def test_hier_pooling_train_np_ft(self):
+ ft = FastText(min_count=1, size=DIM)
+ ft.build_vocab(SENTENCES)
+ m = MaxPooling(ft)
+ m.prep.prepare_vectors(
+ sv=m.sv, total_sentences=len(self.sentences), update=False
+ )
+ m._pre_train_calls()
+
+ m.wv.vectors[:,] = np.arange(len(m.wv.vectors), dtype=np.float32)[:, None]
+ m.wv.vectors_vocab = m.wv.vectors
+
+ m.wv.vectors_ngrams[:,] = np.arange(len(m.wv.vectors_ngrams), dtype=np.float32)[:, None]
+ mem = m._get_thread_working_mem()
+
+ m.hierarchical = True
+
+ output = train_pooling_np(m, self.sentences, m.sv.vectors, mem)
+
+ self.assertEqual((5, 19), output)
+ # self.assertTrue((241 == m.sv[0]).all())
+ # self.assertTrue((737413.9 == m.sv[2]).all())
+ # self.assertTrue((1080970.2 == m.sv[3]).all())
+
+
+if __name__ == "__main__":
+ logging.basicConfig(
+ format="%(asctime)s : %(levelname)s : %(message)s", level=logging.DEBUG
+ )
+ unittest.main()
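Conceptually, plain max pooling takes the element-wise maximum over all word vectors of a sentence, while hierarchical pooling first averages each local window and then takes the element-wise maximum over those window averages. A condensed numpy sketch (hypothetical helper names; the real code above additionally applies word weights and writes into a shared target array):

import numpy as np

def max_pool(word_vecs: np.ndarray) -> np.ndarray:
    # Element-wise maximum over all word vectors of one sentence.
    return word_vecs.max(axis=0)

def hier_pool(word_vecs: np.ndarray, window: int = 5) -> np.ndarray:
    # Average each local window, then take the element-wise maximum
    # over the resulting window averages.
    pooled = np.full(word_vecs.shape[1], -np.inf, dtype=word_vecs.dtype)
    for i in range(len(word_vecs)):
        pooled = np.maximum(pooled, word_vecs[i : i + window].mean(axis=0))
    return pooled

vecs = np.array([[1.0, 0.0], [0.0, 2.0], [3.0, 1.0]])
max_pool(vecs)             # array([3., 2.])
hier_pool(vecs, window=2)  # windows [0:2], [1:3], [2:3] -> array([3. , 1.5])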
From fdd9c9b14026739b15e2795a0c19c4192acc0e34 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 10:32:04 +0100
Subject: [PATCH 07/63] Added todo
---
fse/models/sentencevectors.py | 25 +++++++++++++++++++++++++
1 file changed, 25 insertions(+)
diff --git a/fse/models/sentencevectors.py b/fse/models/sentencevectors.py
index aa5fd95..8d2ddb0 100644
--- a/fse/models/sentencevectors.py
+++ b/fse/models/sentencevectors.py
@@ -50,6 +50,31 @@ def __init__(self, vector_size: int, mapfile_path: str = None):
self.mapfile_path = Path(mapfile_path) if mapfile_path is not None else None
self.mapfile_shape = None
+ """
+ Thoughts on additional features:
+ ANN:
+ [ ] Only construct index when calling most_similar method
+ [ ] Logging of index speed
+ [ ] Save and load of index
+ [ ] Assert that index and vectors are of equal size
+ [ ] Parameters must be tunable afterwards
+ [ ] Method to reconstruct index
+ [ ] How does the index saving comply with SaveLoad?
+ [ ] Write unittests?
+ Brute:
+ [ ] Keep access to default method
+ [ ] Make ANN Search the default?! --> Results?
+ [ ] Throw warning for large datasets for vector norm init
+ [ ] Maybe throw a warning if the embedding + normalization exceeds the available RAM
+ Other:
+ [ ] L2 Distance
+ [ ] L1 Distance
+ [ ] Correlation
+ [ ] Lookup-Functionality (via defaultdict)
+ [ ] Get vector: Not really memory friendly
+ [ ] Show which words are in vocabulary
+ """
+
def __getitem__(self, entities: int) -> ndarray:
"""Get vector representation of `entities`.
From d59241067c291b53712ab2ad21765270d42ad388 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 10:32:15 +0100
Subject: [PATCH 08/63] Fixed n-to-m mapping in np
---
fse/models/average.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/fse/models/average.py b/fse/models/average.py
index d4874e6..83a447d 100644
--- a/fse/models/average.py
+++ b/fse/models/average.py
@@ -132,7 +132,7 @@ def train_average_np(
axis=0,
)
mem *= 1 / len(word_indices)
- s_vectors[sent_adr] = mem.astype(REAL)
+ s_vectors[sent_adr] += mem.astype(REAL)
else:
for obj in indexed_sentences:
mem.fill(0.0)
@@ -160,7 +160,7 @@ def train_average_np(
np_sum(ngram_vectors[ngram_hashes], axis=0) / len(ngram_hashes)
)
# Implicit addition of zero if oov does not contain any ngrams
- s_vectors[sent_adr] = mem / len(sent)
+ s_vectors[sent_adr] += mem / len(sent)
return eff_sentences, eff_words
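The switch from assignment to accumulation matters when several input sentences are mapped onto the same output row, for example via the custom indices of CIndexedList (an n-to-one mapping). A toy sketch of the difference:

import numpy as np

target = np.zeros((1, 3), dtype=np.float32)
sentence_vecs = [np.ones(3, dtype=np.float32), np.full(3, 3.0, dtype=np.float32)]

# Both sentences point at row 0 of the target.
for mem in sentence_vecs:
    target[0] += mem   # accumulation keeps the contribution of every sentence
    # target[0] = mem  # plain assignment would silently keep only the last one

target[0]  # array([4., 4., 4.], dtype=float32)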
From a533fa39e30aec397df1b515c3285c3ad176e537 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 11:10:35 +0100
Subject: [PATCH 09/63] Small fix to avg
---
fse/models/average.py | 2 +-
fse/test/test_average.py | 6 +++---
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/fse/models/average.py b/fse/models/average.py
index 83a447d..5522d11 100644
--- a/fse/models/average.py
+++ b/fse/models/average.py
@@ -131,7 +131,7 @@ def train_average_np(
np_mult(w_vectors[word_indices], w_weights[word_indices][:, None]),
axis=0,
)
- mem *= 1 / len(word_indices)
+ mem /= len(word_indices)
s_vectors[sent_adr] += mem.astype(REAL)
else:
for obj in indexed_sentences:
diff --git a/fse/test/test_average.py b/fse/test/test_average.py
index 983ab86..0bfd02f 100644
--- a/fse/test/test_average.py
+++ b/fse/test/test_average.py
@@ -95,9 +95,9 @@ def test_average_train_np_ft(self):
mem = m._get_thread_working_mem()
output = train_average_np(m, self.sentences, m.sv.vectors, mem)
self.assertEqual((4, 10), output)
- self.assertTrue((1.0 == m.sv[0]).all())
- self.assertTrue((1.5 == m.sv[2]).all())
- self.assertTrue((2 == m.sv[3]).all())
+ self.assertTrue(np.allclose(1.0, m.sv[0]))
+ self.assertTrue(np.allclose(1.5, m.sv[2]))
+ self.assertTrue(np.allclose(2, m.sv[3]))
# "go" -> [1,1...]
# oov: "12345" -> (14 hashes * 2) / 14 = 2
# (2 + 1) / 2 = 1.5
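The expected values in the comment can be traced with a short arithmetic sketch that mirrors the test setup (in-vocabulary vectors are all ones, ngram vectors all twos, and the OOV token hashes to 14 ngrams):

import numpy as np

in_vocab = np.ones(5, dtype=np.float32)            # vector of "go"
ngrams = np.full((14, 5), 2.0, dtype=np.float32)   # 14 ngram vectors of the OOV token

oov = ngrams.sum(axis=0) / len(ngrams)  # (14 * 2) / 14 = 2
sentence = (in_vocab + oov) / 2         # (1 + 2) / 2 = 1.5
sentence  # array([1.5, 1.5, 1.5, 1.5, 1.5], dtype=float32)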
From 83a0c5a267f194b4a11fbeda345393461372286a Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 11:11:30 +0100
Subject: [PATCH 10/63] MaxPool work NP code
---
fse/models/pooling.py | 54 ++++++++++++++--------------------------
fse/test/test_pooling.py | 14 ++++++++---
2 files changed, 28 insertions(+), 40 deletions(-)
diff --git a/fse/models/pooling.py b/fse/models/pooling.py
index 2f5c009..0d788f3 100644
--- a/fse/models/pooling.py
+++ b/fse/models/pooling.py
@@ -122,6 +122,7 @@ def get_ft_vector(word:str) -> ndarray:
vocab_index = vocab[word].index
return w_vectors[vocab_index] * w_weights[vocab_index]
else:
+ # Requires additional temporary storage
ngram_hashes = ft_ngram_hashes(
word, min_n, max_n, bucket, True
)[:max_ngrams]
@@ -167,7 +168,7 @@ def get_ft_vector(word:str) -> ndarray:
axis=0,
)
# Perform hierarchical max pooling
- mem *= 1 / len(window_indices)
+ mem /= len(window_indices)
s_vectors[sent_adr] = np_maximum(s_vectors[sent_adr], mem,)
else:
for obj in indexed_sentences:
@@ -184,41 +185,30 @@ def get_ft_vector(word:str) -> ndarray:
if not hierarchical:
for word in sent:
- if word in vocab:
- vocab_index = vocab[word].index
- s_vectors[sent_adr] = np_maximum(
- get_ft_vector(word),
- s_vectors[sent_adr],
- )
- else:
- ngram_hashes = ft_ngram_hashes(
- word, min_n, max_n, bucket, True
- )[:max_ngrams]
- if len(ngram_hashes) == 0:
- continue
- mem = oov_weight * (
- np_sum(ngram_vectors[ngram_hashes], axis=0)
- / len(ngram_hashes)
- )
-
- s_vectors[sent_adr] = np_maximum(
- mem,
- s_vectors[sent_adr],
- )
+ s_vectors[sent_adr] = np_maximum(
+ get_ft_vector(word),
+ s_vectors[sent_adr],
+ )
else:
- # Expensive iteration
for word_index, word in enumerate(sent):
mem.fill(0.0)
+ mem += get_ft_vector(word)
+ count = 1
+
for context in sent[word_index : word_index + window]:
if word == context:
continue
- print(context)
-
+ mem += get_ft_vector(context)
+ count += 1
+ mem /= count
+ s_vectors[sent_adr] = np_maximum(
+ mem,
+ s_vectors[sent_adr],
+ )
return eff_sentences, eff_words
-
# try:
# from fse.models.average_inner import train_average_cy
# from fse.models.average_inner import (
@@ -268,7 +258,6 @@ def __init__(
sv_mapfile_path: str = None,
wv_mapfile_path: str = None,
workers: int = 1,
- lang_freq: str = None,
**kwargs
):
""" Max pooling sentence embeddings model. Performs a simple maximum pooling operation over all
@@ -293,14 +282,7 @@ def __init__(
Use sv_mapfile_path and wv_mapfile_path to train disk-to-disk without needing much ram.
workers : int, optional
Number of working threads, used for multithreading. For most tasks (few words in a sentence)
- a value of 1 should be more than enough.
- lang_freq : str, optional
- Some pre-trained embeddings, i.e. "GoogleNews-vectors-negative300.bin", do not contain information about
- the frequency of a word. As the frequency is required for estimating the word weights, we induce
- frequencies into the wv.vocab.count based on :class:`~wordfreq`
- If no frequency information is available, you can choose the language to estimate the frequency.
- See https://github.com/LuminosoInsight/wordfreq
-
+ a value of 1 should be more than enough.
"""
self.hierarchical = bool(hierarchical)
self.window_size = int(window_size)
@@ -310,7 +292,7 @@ def __init__(
sv_mapfile_path=sv_mapfile_path,
wv_mapfile_path=wv_mapfile_path,
workers=workers,
- lang_freq=lang_freq,
+ lang_freq=None,
batch_words=MAX_WORDS_IN_BATCH,
batch_ngrams=MAX_NGRAMS_IN_BATCH,
fast_version=FAST_VERSION,
diff --git a/fse/test/test_pooling.py b/fse/test/test_pooling.py
index d6c8ab6..f3bd75f 100644
--- a/fse/test/test_pooling.py
+++ b/fse/test/test_pooling.py
@@ -314,10 +314,16 @@ def test_hier_pooling_train_np_ft(self):
output = train_pooling_np(m, self.sentences, m.sv.vectors, mem)
self.assertEqual((5, 19), output)
- # self.assertTrue((241 == m.sv[0]).all())
- # self.assertTrue((737413.9 == m.sv[2]).all())
- # self.assertTrue((1080970.2 == m.sv[3]).all())
-
+ self.assertTrue((183 == m.sv[0]).all())
+ self.assertTrue((737413.9 == m.sv[2]).all())
+ self.assertTrue((1080970.2 == m.sv[3]).all())
+ """
+ Note to future self:
+ Due to the size of the ngram vectors,
+ an ngram at the last position of the sentence
+ will always be the highest value.
+ TODO: This unittest is thus a bit flawed. Maybe fix?
+ """
if __name__ == "__main__":
logging.basicConfig(
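The get_ft_vector helper used above resolves in-vocabulary words to their weighted vectors and falls back to the mean of the hashed character-ngram vectors for OOV words. A simplified sketch of that fallback, assuming gensim 3.x (the real helper additionally scales the result by oov_weight and truncates to max_ngrams hashes):

import numpy as np
from gensim.models.utils_any2vec import ft_ngram_hashes

def oov_vector(word: str, ngram_vectors: np.ndarray,
               min_n: int = 3, max_n: int = 6, bucket: int = 2000000) -> np.ndarray:
    # Hash the character ngrams of the word into the ngram buckets and
    # average the corresponding rows; no ngrams (very short words) -> zeros.
    hashes = ft_ngram_hashes(word, min_n, max_n, bucket, True)
    if not hashes:
        return np.zeros(ngram_vectors.shape[1], dtype=np.float32)
    return ngram_vectors[hashes].mean(axis=0)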
From 5d6e885faa6e03e0ff870d577b101ecd5c75dfb1 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 11:18:11 +0100
Subject: [PATCH 11/63] Typing changes
---
fse/models/average.py | 8 ++++----
fse/models/base_s2v.py | 8 ++++----
fse/models/pooling.py | 23 ++++++++++++++++++-----
3 files changed, 26 insertions(+), 13 deletions(-)
diff --git a/fse/models/average.py b/fse/models/average.py
index 5522d11..02e2adc 100644
--- a/fse/models/average.py
+++ b/fse/models/average.py
@@ -57,7 +57,7 @@ def train_average_np(
model: BaseSentence2VecModel,
indexed_sentences: List[tuple],
target: ndarray,
- memory: ndarray,
+ memory: tuple,
) -> [int, int]:
"""Training on a sequence of sentences and update the target ndarray.
@@ -77,8 +77,8 @@ def train_average_np(
target : ndarray
The target ndarray. We use the index from indexed_sentences
to write into the corresponding row of target.
- memory : ndarray
- Private memory for each working thread
+ memory : tuple
+ Private memory array(s) for each working thread
Returns
-------
@@ -254,7 +254,7 @@ def __init__(
)
def _do_train_job(
- self, data_iterable: List[tuple], target: ndarray, memory: ndarray
+ self, data_iterable: List[tuple], target: ndarray, memory: tuple
) -> [int, int]:
""" Internal routine which is called on training and performs averaging for all entries in the iterable """
eff_sentences, eff_words = train_average(
diff --git a/fse/models/base_s2v.py b/fse/models/base_s2v.py
index 1846db8..5dc5b84 100644
--- a/fse/models/base_s2v.py
+++ b/fse/models/base_s2v.py
@@ -523,13 +523,13 @@ def _move_ndarray_to_disk(
readonly_memvecs = np_memmap(path, dtype=REAL, mode="r", shape=shape)
return readonly_memvecs
- def _get_thread_working_mem(self) -> [ndarray, ndarray]:
+ def _get_thread_working_mem(self) -> tuple:
"""Computes the memory used per worker thread.
Returns
-------
- np.ndarray
- Each worker threads private work memory.
+ tuple
+ Each worker threads private work memory array(s).
"""
mem = zeros_aligned(self.sv.vector_size, dtype=REAL)
@@ -537,7 +537,7 @@ def _get_thread_working_mem(self) -> [ndarray, ndarray]:
return (mem, oov_mem)
def _do_train_job(
- self, data_iterable: List[tuple], target: ndarray, memory: ndarray
+ self, data_iterable: List[tuple], target: ndarray, memory: tuple
) -> [int, int]:
""" Function to be called on a batch of sentences. Returns eff sentences/words """
raise NotImplementedError()
diff --git a/fse/models/pooling.py b/fse/models/pooling.py
index 0d788f3..a4bd46d 100644
--- a/fse/models/pooling.py
+++ b/fse/models/pooling.py
@@ -58,7 +58,7 @@ def train_pooling_np(
model: BaseSentence2VecModel,
indexed_sentences: List[tuple],
target: ndarray,
- memory: ndarray,
+ memory: tuple,
) -> [int, int]:
"""Training on a sequence of sentences and update the target ndarray.
@@ -67,7 +67,7 @@ def train_pooling_np(
Warnings
--------
This is the non-optimized, pure Python version. If you have a C compiler,
- fse will use an optimized code path from :mod:`fse.models.average_inner` instead.
+ fse will use an optimized code path from :mod:`fse.models.pooling_inner` instead.
Parameters
----------
@@ -78,8 +78,8 @@ def train_pooling_np(
target : ndarray
The target ndarray. We use the index from indexed_sentences
to write into the corresponding row of target.
- memory : ndarray
- Private memory for each working thread
+ memory : tuple
+ Private memory array(s) for each working thread
Returns
-------
@@ -118,6 +118,19 @@ def train_pooling_np(
oov_weight = np_amax(w_weights)
def get_ft_vector(word:str) -> ndarray:
+ """ Function to compute the FT vectors if applicable
+
+ Parameters
+ ----------
+ word : str
+ String representation of token
+
+ Returns
+ -------
+ ndarray
+ FT vector representation
+
+ """
if word in vocab:
vocab_index = vocab[word].index
return w_vectors[vocab_index] * w_weights[vocab_index]
@@ -299,7 +312,7 @@ def __init__(
)
def _do_train_job(
- self, data_iterable: List[tuple], target: ndarray, memory: ndarray
+ self, data_iterable: List[tuple], target: ndarray, memory: tuple
) -> [int, int]:
""" Internal routine which is called on training and performs averaging for all entries in the iterable """
eff_sentences, eff_words = train_pooling(
From 00b33cb75fec3fc2bf41e47b2b28aa50fc9110f0 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 11:47:40 +0100
Subject: [PATCH 12/63] Added pooling
---
fse/models/__init__.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/fse/models/__init__.py b/fse/models/__init__.py
index 3f13790..38f90b4 100644
--- a/fse/models/__init__.py
+++ b/fse/models/__init__.py
@@ -1,4 +1,5 @@
from .average import Average
from .sif import SIF
from .usif import uSIF
-from .sentencevectors import SentenceVectors
+from .pooling import MaxPooling
+from .sentencevectors import SentenceVectors
\ No newline at end of file
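With the export in place, the new model can be constructed like the existing ones; a usage sketch along the lines of the module docstring, with toy sentences:

from gensim.models import Word2Vec
from fse.models import MaxPooling

sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]]
w2v = Word2Vec(sentences, min_count=1, size=20)

model = MaxPooling(w2v)
model.train([(s, i) for i, s in enumerate(sentences)])
model.sv.vectors.shape  # (2, 20)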
From 976df4460ff297d2ab89b9a820a1aea3f0f44aec Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 11:47:51 +0100
Subject: [PATCH 13/63] Fixed non-negative bug + tests
---
fse/models/pooling.py | 3 ++-
fse/test/test_pooling.py | 49 +++++++++++++++++++++++++++-------------
2 files changed, 35 insertions(+), 17 deletions(-)
diff --git a/fse/models/pooling.py b/fse/models/pooling.py
index a4bd46d..470be00 100644
--- a/fse/models/pooling.py
+++ b/fse/models/pooling.py
@@ -163,10 +163,11 @@ def get_ft_vector(word:str) -> ndarray:
if not hierarchical:
# Take the maximum value along the axis
- s_vectors[sent_adr] = np_amax(
+ mem = np_amax(
np_mult(w_vectors[word_indices], w_weights[word_indices][:, None]),
axis=0,
)
+ s_vectors[sent_adr] = np_maximum(s_vectors[sent_adr], mem,)
else:
# More expensive iteration
for word_index, _ in enumerate(word_indices):
diff --git a/fse/test/test_pooling.py b/fse/test/test_pooling.py
index f3bd75f..1503a53 100644
--- a/fse/test/test_pooling.py
+++ b/fse/test/test_pooling.py
@@ -30,6 +30,17 @@
W2V.build_vocab(SENTENCES)
W2V.wv.vectors[:,] = np.arange(len(W2V.wv.vectors), dtype=np.float32)[:, None]
+W2V_R = Word2Vec(min_count=1, size=DIM)
+W2V_R.build_vocab(SENTENCES)
+
+FT = FastText(min_count=1, size=DIM)
+FT.build_vocab(SENTENCES)
+FT.wv.vectors[:,] = np.arange(len(FT.wv.vectors), dtype=np.float32)[:, None]
+FT.wv.vectors_vocab = FT.wv.vectors
+FT.wv.vectors_ngrams[:,] = np.arange(len(FT.wv.vectors_ngrams), dtype=np.float32)[:, None]
+
+FT_R = FastText(min_count=1, size=DIM)
+FT_R.build_vocab(SENTENCES)
class TestAverageFunctions(unittest.TestCase):
def setUp(self):
@@ -257,6 +268,11 @@ def test_pooling_train_np_w2v(self):
self.assertTrue((306 == self.model.sv[1]).all())
self.assertTrue((self.model.wv.vocab["go"].index == self.model.sv[2]).all())
+ def test_pooling_train_np_w2v_non_negative(self):
+ mpool = MaxPooling(W2V_R)
+ mpool.train(self.sentences)
+ self.assertTrue((mpool.sv.vectors >= 0).all())
+
def test_hier_pooling_train_np_w2v(self):
self.model.sv.vectors = np.zeros_like(self.model.sv.vectors, dtype=np.float32)
mem = self.model._get_thread_working_mem()
@@ -272,19 +288,17 @@ def test_hier_pooling_train_np_w2v(self):
self.assertTrue((183 == self.model.sv[0]).all())
self.assertTrue(np.allclose(self.model.sv[4], 245.66667))
+ def test_hpooling_train_np_w2v_non_negative(self):
+ mpool = MaxPooling(W2V_R, hierarchical=True)
+ mpool.train(self.sentences)
+ self.assertTrue((mpool.sv.vectors >= 0).all())
+
def test_pooling_train_np_ft(self):
- ft = FastText(min_count=1, size=DIM)
- ft.build_vocab(SENTENCES)
- m = MaxPooling(ft)
+ m = MaxPooling(FT)
m.prep.prepare_vectors(
sv=m.sv, total_sentences=len(self.sentences), update=False
)
m._pre_train_calls()
-
- m.wv.vectors[:,] = np.arange(len(m.wv.vectors), dtype=np.float32)[:, None]
- m.wv.vectors_vocab = m.wv.vectors
-
- m.wv.vectors_ngrams[:,] = np.arange(len(m.wv.vectors_ngrams), dtype=np.float32)[:, None]
mem = m._get_thread_working_mem()
output = train_pooling_np(m, self.sentences, m.sv.vectors, mem)
@@ -294,19 +308,17 @@ def test_pooling_train_np_ft(self):
self.assertTrue((737413.9 == m.sv[2]).all())
self.assertTrue((1080970.2 == m.sv[3]).all())
+ def test_pooling_train_np_ft_non_negative(self):
+ mpool = MaxPooling(FT_R)
+ mpool.train(self.sentences)
+ self.assertTrue((mpool.sv.vectors >= 0).all())
+
def test_hier_pooling_train_np_ft(self):
- ft = FastText(min_count=1, size=DIM)
- ft.build_vocab(SENTENCES)
- m = MaxPooling(ft)
+ m = MaxPooling(FT)
m.prep.prepare_vectors(
sv=m.sv, total_sentences=len(self.sentences), update=False
)
m._pre_train_calls()
-
- m.wv.vectors[:,] = np.arange(len(m.wv.vectors), dtype=np.float32)[:, None]
- m.wv.vectors_vocab = m.wv.vectors
-
- m.wv.vectors_ngrams[:,] = np.arange(len(m.wv.vectors_ngrams), dtype=np.float32)[:, None]
mem = m._get_thread_working_mem()
m.hierarchical = True
@@ -325,6 +337,11 @@ def test_hier_pooling_train_np_ft(self):
TODO: This unittest is thus a bit flawed. Maybe fix?
"""
+ def test_hier_pooling_train_np_ft_non_negative(self):
+ mpool = MaxPooling(FT_R, hierarchical=True)
+ mpool.train(self.sentences)
+ self.assertTrue((mpool.sv.vectors >= 0).all())
+
if __name__ == "__main__":
logging.basicConfig(
format="%(asctime)s : %(levelname)s : %(message)s", level=logging.DEBUG
From fb1d55b9afdb064f3dcc3a6dcbefb51d09a867ca Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 12:16:28 +0100
Subject: [PATCH 14/63] Updated readme
---
README.md | 31 +++++++++++++++++++------------
1 file changed, 19 insertions(+), 12 deletions(-)
diff --git a/README.md b/README.md
index aff4a04..986f12f 100644
--- a/README.md
+++ b/README.md
@@ -20,16 +20,14 @@ Find the corresponding blog post(s) here:
- [Visualizing 100,000 Amazon Products](https://towardsdatascience.com/vis-amz-83dea6fcb059)
- [Sentence Embeddings. Fast, please!](https://towardsdatascience.com/fse-2b1ffa791cf9)
-- **Announcment: Please understand, that I am at the end of my PhD and I do not have many free minutes to fix issues or add features.**
-
-**fse** implements three algorithms for sentence embeddings. You can choose
-between *unweighted sentence averages*, *smooth inverse frequency averages*, and *unsupervised smooth inverse frequency averages*.
+**fse** implements five algorithms for sentence embeddings. You can choose
+between *unweighted sentence averages*, *smooth inverse frequency averages*, *unsupervised smooth inverse frequency averages*, *max pooling*, and *hierarchical max pooling*.
Key features of **fse** are:
**[X]** Up to 500.000 sentences / second (1)
-**[X]** Supports Average, SIF, and uSIF Embeddings
+**[X]** Supports Average, SIF, uSIF, MaxPooling, and Hierarchical MaxPooling Embeddings
**[X]** Full support for Gensims Word2Vec and all other compatible classes
@@ -51,19 +49,14 @@ Key features of **fse** are:
**[X]** Extensive documentation of all functions
+**[X]** Extensive unittests for Linux/OSX
+
**[X]** Optimized Input Classes
(1) May vary significantly from system to system (i.e. by using swap memory) and processing.
I regularly observe 300k-500k sentences/s for preprocessed data on my Macbook (2016).
Visit **Tutorial.ipynb** for an example.
-Things I will work on next:
-
-**[ ]** MaxPooling / Hierarchical Pooling Embedding
-
-**[ ]** Approximate Nearest Neighbor Search for SentenceVectors
-
-
Installation
------------
@@ -107,6 +100,7 @@ The models presented are based on
- Deep-averaging embeddings [1]
- Smooth inverse frequency embeddings [2]
- Unsupervised smooth inverse frequency embeddings [3]
+- MaxPooling / Hierarchical MaxPooling [5]
Credits to Radim Řehůřek and all contributors for the **awesome** library
and code that [Gensim](https://github.com/RaRe-Technologies/gensim) provides. A whole lot of the code found in this lib is based on Gensim.
@@ -156,6 +150,17 @@ Model | [STS Benchmark](http://ixa2.si.ehu.es/stswiki/index.php/STSbenchmark#Re
Changelog
-------------
+0.1.16 from 0.1.15:
+- Added MaxPooling / Hierarchical MaxPooling
+- Added features to SentenceVectors
+- Added further unittests
+- Workaround for Numpy memmap issue (https://github.com/numpy/numpy/issues/13172)
+- Bugfixes for python 3.8 builds
+- Code refactoring to black style
+- SVD RAM subsampling for SIF / uSIF
+- Minor fixes for nan-handling
+- Minor fix for sentencevectors class
+
0.1.15 from 0.1.11:
- Fixed major FT Ngram computation bug
- Rewrote the input class. Turns out NamedTuple was pretty slow.
@@ -181,6 +186,8 @@ Proceedings of the 3rd Workshop on Representation Learning for NLP. (Toulon, Fra
4. Eneko Agirre, Daniel Cer, Mona Diab, Iñigo Lopez-Gazpio, Lucia Specia. Semeval-2017 Task 1: Semantic Textual Similarity Multilingual and Crosslingual Focused Evaluation. Proceedings of SemEval 2017.
+5. Dinghan Shen, Guoyin Wang, Wenlin Wang, Martin Renqiang Min, Qinliang Su, Yizhe Zhang, Chunyuan Li, Ricardo Henao, Lawrence Carin (2018) Baseline Needs More Love: On Simple Word-Embedding-Based Models and Associated Pooling Mechanisms. ACL 2018.
+
Copyright
-------------
From e8a7bd990566cacd8bb4c72963a8e90009fe8e38 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 13:04:43 +0100
Subject: [PATCH 15/63] Corrected comments
---
fse/models/pooling.py | 2 +-
fse/test/test_pooling.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/fse/models/pooling.py b/fse/models/pooling.py
index 470be00..93851d1 100644
--- a/fse/models/pooling.py
+++ b/fse/models/pooling.py
@@ -315,7 +315,7 @@ def __init__(
def _do_train_job(
self, data_iterable: List[tuple], target: ndarray, memory: tuple
) -> [int, int]:
- """ Internal routine which is called on training and performs averaging for all entries in the iterable """
+ """ Internal routine which is called on training and performs pooling for all entries in the iterable """
eff_sentences, eff_words = train_pooling(
model=self, indexed_sentences=data_iterable, target=target, memory=memory,
)
diff --git a/fse/test/test_pooling.py b/fse/test/test_pooling.py
index 1503a53..1179bbf 100644
--- a/fse/test/test_pooling.py
+++ b/fse/test/test_pooling.py
@@ -5,7 +5,7 @@
# Copyright (C) 2019 Oliver Borchers
"""
-Automated tests for checking the average model.
+Automated tests for checking the model.
"""
import logging
From 5c94d5cff5513da785c7bbd16713da6658024c78 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Sun, 16 Feb 2020 13:06:55 +0100
Subject: [PATCH 16/63] Black formatting
---
fse/models/__init__.py | 2 +-
fse/models/pooling.py | 21 +++++++++------------
fse/test/test_inputs.py | 15 +++++++++------
fse/test/test_pooling.py | 18 +++++++++++-------
4 files changed, 30 insertions(+), 26 deletions(-)
diff --git a/fse/models/__init__.py b/fse/models/__init__.py
index 38f90b4..d3e0f92 100644
--- a/fse/models/__init__.py
+++ b/fse/models/__init__.py
@@ -2,4 +2,4 @@
from .sif import SIF
from .usif import uSIF
from .pooling import MaxPooling
-from .sentencevectors import SentenceVectors
\ No newline at end of file
+from .sentencevectors import SentenceVectors
diff --git a/fse/models/pooling.py b/fse/models/pooling.py
index 93851d1..a14f4e4 100644
--- a/fse/models/pooling.py
+++ b/fse/models/pooling.py
@@ -117,7 +117,7 @@ def train_pooling_np(
bucket = model.wv.bucket
oov_weight = np_amax(w_weights)
- def get_ft_vector(word:str) -> ndarray:
+ def get_ft_vector(word: str) -> ndarray:
""" Function to compute the FT vectors if applicable
Parameters
@@ -136,14 +136,14 @@ def get_ft_vector(word:str) -> ndarray:
return w_vectors[vocab_index] * w_weights[vocab_index]
else:
# Requires additional temporary storage
- ngram_hashes = ft_ngram_hashes(
- word, min_n, max_n, bucket, True
- )[:max_ngrams]
+ ngram_hashes = ft_ngram_hashes(word, min_n, max_n, bucket, True)[
+ :max_ngrams
+ ]
if len(ngram_hashes) == 0:
return zeros(size, dtype=REAL)
return (
- oov_weight *
- np_sum(ngram_vectors[ngram_hashes], axis=0)
+ oov_weight
+ * np_sum(ngram_vectors[ngram_hashes], axis=0)
/ len(ngram_hashes)
)
@@ -200,8 +200,7 @@ def get_ft_vector(word:str) -> ndarray:
if not hierarchical:
for word in sent:
s_vectors[sent_adr] = np_maximum(
- get_ft_vector(word),
- s_vectors[sent_adr],
+ get_ft_vector(word), s_vectors[sent_adr],
)
else:
for word_index, word in enumerate(sent):
@@ -216,13 +215,11 @@ def get_ft_vector(word:str) -> ndarray:
count += 1
mem /= count
- s_vectors[sent_adr] = np_maximum(
- mem,
- s_vectors[sent_adr],
- )
+ s_vectors[sent_adr] = np_maximum(mem, s_vectors[sent_adr],)
return eff_sentences, eff_words
+
# try:
# from fse.models.average_inner import train_average_cy
# from fse.models.average_inner import (
diff --git a/fse/test/test_inputs.py b/fse/test/test_inputs.py
index 97bef70..7aec523 100644
--- a/fse/test/test_inputs.py
+++ b/fse/test/test_inputs.py
@@ -134,7 +134,7 @@ def test_mutable_funcs(self):
self.il.insert(0, "the")
with self.assertRaises(NotImplementedError):
self.il.append("the")
-
+
def test_arg_merging(self):
with self.assertRaises(RuntimeError):
CIndexedList(self.list_a, self.list_a, custom_index=[1, 1])
@@ -170,7 +170,7 @@ def test_mutable_funcs(self):
self.il.insert(0, "the")
with self.assertRaises(NotImplementedError):
self.il.append("the")
-
+
def test_arg_merging(self):
with self.assertRaises(RuntimeError):
SplitCIndexedList(self.list_a, self.list_a, custom_index=[1, 1])
@@ -199,14 +199,17 @@ def test_mutable_funcs(self):
self.il.insert(0, "the")
with self.assertRaises(NotImplementedError):
self.il.append("the")
-
+
def test_arg_merging(self):
with self.assertRaises(RuntimeError):
- CSplitCIndexedList(self.list_a, self.list_a,
+ CSplitCIndexedList(
+ self.list_a,
+ self.list_a,
custom_split=self.split_func,
- custom_index=[1, 1]
+ custom_index=[1, 1],
)
+
class TestIndexedLineDocument(unittest.TestCase):
def setUp(self):
self.p = "fse/test/test_data/test_sentences.txt"
@@ -233,4 +236,4 @@ def test_yield(self):
logging.basicConfig(
format="%(asctime)s : %(levelname)s : %(message)s", level=logging.DEBUG
)
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/fse/test/test_pooling.py b/fse/test/test_pooling.py
index 1179bbf..519df98 100644
--- a/fse/test/test_pooling.py
+++ b/fse/test/test_pooling.py
@@ -37,11 +37,14 @@
FT.build_vocab(SENTENCES)
FT.wv.vectors[:,] = np.arange(len(FT.wv.vectors), dtype=np.float32)[:, None]
FT.wv.vectors_vocab = FT.wv.vectors
-FT.wv.vectors_ngrams[:,] = np.arange(len(FT.wv.vectors_ngrams), dtype=np.float32)[:, None]
+FT.wv.vectors_ngrams[:,] = np.arange(len(FT.wv.vectors_ngrams), dtype=np.float32)[
+ :, None
+]
FT_R = FastText(min_count=1, size=DIM)
FT_R.build_vocab(SENTENCES)
+
class TestAverageFunctions(unittest.TestCase):
def setUp(self):
self.sentences = [
@@ -49,7 +52,7 @@ def setUp(self):
["So", "Apple", "bought", "buds"],
["go", "12345"],
["pull", "12345678910111213"],
- "this is a longer test sentence test longer sentences".split()
+ "this is a longer test sentence test longer sentences".split(),
]
self.sentences = [(s, i) for i, s in enumerate(self.sentences)]
self.model = MaxPooling(W2V)
@@ -230,16 +233,16 @@ def test_check_parameter_sanity(self):
se.word_weights = np.full(20, 2.0, dtype=np.float32)
with self.assertRaises(ValueError):
se._check_parameter_sanity()
-
+
se = MaxPooling(W2V, window_size=0)
with self.assertRaises(ValueError):
se._check_parameter_sanity()
-
+
def test_train(self):
self.assertEqual(
(100, 1450), self.model.train([(s, i) for i, s in enumerate(SENTENCES)])
)
-
+
def test_do_train_job(self):
self.model.prep.prepare_vectors(
sv=self.model.sv, total_sentences=len(SENTENCES), update=True
@@ -272,13 +275,13 @@ def test_pooling_train_np_w2v_non_negative(self):
mpool = MaxPooling(W2V_R)
mpool.train(self.sentences)
self.assertTrue((mpool.sv.vectors >= 0).all())
-
+
def test_hier_pooling_train_np_w2v(self):
self.model.sv.vectors = np.zeros_like(self.model.sv.vectors, dtype=np.float32)
mem = self.model._get_thread_working_mem()
self.model.hierarchical = True
-
+
output = train_pooling_np(
self.model, self.sentences, self.model.sv.vectors, mem
)
@@ -342,6 +345,7 @@ def test_hier_pooling_train_np_ft_non_negative(self):
mpool.train(self.sentences)
self.assertTrue((mpool.sv.vectors >= 0).all())
+
if __name__ == "__main__":
logging.basicConfig(
format="%(asctime)s : %(levelname)s : %(message)s", level=logging.DEBUG
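
One detail in the test diff above is worth spelling out: by broadcasting np.arange over the vector table, every component of word i's vector equals i, so pooled outputs can be verified by hand. A minimal sketch of that trick with made-up sizes (the names here are illustrative, not the test module's):

    import numpy as np

    DIM = 5
    vecs = np.zeros((4, DIM), dtype=np.float32)
    # Same broadcasting trick as in the tests: row i becomes [i, i, ..., i].
    vecs[:, ] = np.arange(len(vecs), dtype=np.float32)[:, None]

    sentence = [0, 2, 3]               # word indices of one toy sentence
    print(vecs[sentence].max(axis=0))  # -> [3. 3. 3. 3. 3.], the largest index wins
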
From 727efb2621e6f03cf0d6c85011463e1d5e227a5f Mon Sep 17 00:00:00 2001
From: OliverB
Date: Tue, 18 Feb 2020 12:08:07 +0100
Subject: [PATCH 17/63] Updated readme
---
README.md | 40 +++++++++++++++++++++-------------------
1 file changed, 21 insertions(+), 19 deletions(-)
diff --git a/README.md b/README.md
index 986f12f..7754ea1 100644
--- a/README.md
+++ b/README.md
@@ -81,6 +81,25 @@ If building the Cython extension fails (you will be notified), try:
Usage
-------------
+In order to use **fse** you must first estimate a Gensim model which contains a
+gensim.models.keyedvectors.BaseKeyedVectors class, for example
+*Word2Vec* or *FastText*. Then you can proceed to compute sentence embeddings
+for a corpus.
+
+ from gensim.models import FastText
+ sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]]
+ ft = FastText(sentences, min_count=1, size=10)
+
+ from fse.models import Average
+ from fse import IndexedList
+ model = Average(ft)
+ model.train(IndexedList(sentences))
+
+ model.sv.similarity(0,1)
+
+fse offers multi-thread support out of the box. However, for most
+applications a *single thread will most likely be sufficient*.
+
Within the folder notebooks you can find the following guides:
**Tutorial.ipynb** offers a detailed walk-through of some of the most important functions fse has to offer.
@@ -105,30 +124,13 @@ The models presented are based on
Credits to Radim Řehůřek and all contributors for the **awesome** library
and code that [Gensim](https://github.com/RaRe-Technologies/gensim) provides. A whole lot of the code found in this lib is based on Gensim.
-In order to use **fse** you must first estimate a Gensim model which contains a
-gensim.models.keyedvectors.BaseKeyedVectors class, for example
-*Word2Vec* or *Fasttext*. Then you can proceed to compute sentence embeddings
-for a corpus.
-
- from gensim.models import FastText
- sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]]
- ft = FastText(sentences, min_count=1, size=10)
-
- from fse.models import Average
- from fse import IndexedList
- model = Average(ft)
- model.train(IndexedList(sentences))
-
- model.sv.similarity(0,1)
-
-fse offers multi-thread support out of the box. However, for most
-applications a *single thread will most likely be sufficient*.
-
To install **fse** on Colab, check out: https://colab.research.google.com/drive/1qq9GBgEosG7YSRn7r6e02T9snJb04OEi
Results
------------
+Note: Though some models perform very well on the sentence similarity task (STS), this does not imply good performance on other downstream tasks!
+
Model | [STS Benchmark](http://ixa2.si.ehu.es/stswiki/index.php/STSbenchmark#Results)
:---: | :---:
`CBOW-Paranmt` | **79.85**
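
The usage block moved into this section of the README passes an IndexedList to train. As the test patches elsewhere in this series suggest, train ultimately consumes (tokens, index) tuples, so a plain list of such tuples is an equivalent way to feed it. A small end-to-end sketch under that assumption (gensim 3.x keyword `size`; the toy sentences are illustrative):

    from gensim.models import Word2Vec
    from fse.models import Average

    sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]]
    w2v = Word2Vec(sentences, min_count=1, size=8)

    model = Average(w2v)
    # Equivalent to IndexedList(sentences): each item is (list_of_tokens, index).
    model.train([(s, i) for i, s in enumerate(sentences)])

    # Cosine similarity between the two sentence vectors.
    print(model.sv.similarity(0, 1))
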
From 4f936d8f6f2986af8b7faaf5c3c34a0235fd9d40 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Tue, 18 Feb 2020 12:10:55 +0100
Subject: [PATCH 18/63] Updated readme
---
README.md | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/README.md b/README.md
index 7754ea1..8fcde07 100644
--- a/README.md
+++ b/README.md
@@ -78,6 +78,8 @@ If building the Cython extension fails (you will be notified), try:
pip install -U git+https://github.com/oborchers/Fast_Sentence_Embeddings
+To install **fse** on Colab, check out: https://colab.research.google.com/drive/1qq9GBgEosG7YSRn7r6e02T9snJb04OEi
+
Usage
-------------
@@ -124,8 +126,6 @@ The models presented are based on
Credits to Radim Řehůřek and all contributors for the **awesome** library
and code that [Gensim](https://github.com/RaRe-Technologies/gensim) provides. A whole lot of the code found in this lib is based on Gensim.
-To install **fse** on Colab, check out: https://colab.research.google.com/drive/1qq9GBgEosG7YSRn7r6e02T9snJb04OEi
-
Results
------------
@@ -159,7 +159,7 @@ Changelog
- Workaround for Numpy memmap issue (https://github.com/numpy/numpy/issues/13172)
- Bugfixes for python 3.8 builds
- Code refactoring to black style
-- SVD ram subsampling for SIF / uSIF
+- SVD RAM subsampling for SIF / uSIF (customizable, the default is 1 GB of RAM)
- Minor fixes for nan-handling
- Minor fix for sentencevectors class
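
The changelog entry on SVD RAM subsampling describes capping how many sentence vectors are handed to the principal-component estimation so that the working copy stays inside a configurable budget (1 GB by default). A back-of-the-envelope helper in the same spirit; the function name and numbers are illustrative, not the library's actual routine:

    import numpy as np

    def max_rows_for_budget(dim, cache_size_gb=1.0, dtype=np.float32):
        # How many vectors of this dimensionality fit into the RAM budget.
        bytes_per_row = dim * np.dtype(dtype).itemsize
        return int(cache_size_gb * 1024 ** 3 // bytes_per_row)

    # 300-dimensional float32 vectors: roughly 894k rows fit into the default 1 GB.
    print(max_rows_for_budget(300))
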
From cc31045ed60fdac16f2fd4270fe4b993aa56463a Mon Sep 17 00:00:00 2001
From: OliverB
Date: Tue, 18 Feb 2020 12:14:57 +0100
Subject: [PATCH 19/63] Added todos
---
fse/models/base_s2v.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/fse/models/base_s2v.py b/fse/models/base_s2v.py
index 5dc5b84..3dc4b42 100644
--- a/fse/models/base_s2v.py
+++ b/fse/models/base_s2v.py
@@ -131,13 +131,14 @@ def __init__(
[ ] windows support
[ ] documentation
[ ] more benchmarks
+ [ ] more speed benchmarks -> worker support still somewhat weird
[ ] remove wv_mapfile_path?
[ ] modifiable sv_mapfile_path?
[ ] models:
[ ] check feasibility first
- [ ] max-pooling -> easy
- [ ] hierarchical pooling -> easy
+ [X] max-pooling -> easy
+ [X] hierarchical pooling -> easy
[ ] discrete cosine transform -> somewhat easy, questionable
[ ] valve -> unclear, not cited enough
        [ ] power-means embedding -> very large dimensionality
From 1b144aca213b483682e23dbabd26a82d54bd6d19 Mon Sep 17 00:00:00 2001
From: OliverB
Date: Wed, 26 Feb 2020 17:57:24 +0100
Subject: [PATCH 20/63] First MaxPool Implementation
---
fse/models/average_inner.c | 10722 +++++++++++++++++++++++++++++++++
fse/models/average_inner.pxd | 12 +-
fse/models/average_inner.pyx | 7 +-
fse/models/base_s2v.py | 9 +-
fse/models/pooling.py | 30 +-
fse/models/pooling_inner.c | 8350 +++++++++++++++++++++++++
fse/models/pooling_inner.pyx | 211 +
fse/test/test_base_s2v.py | 7 +
fse/test/test_pooling.py | 154 +-
9 files changed, 19406 insertions(+), 96 deletions(-)
create mode 100644 fse/models/average_inner.c
create mode 100644 fse/models/pooling_inner.c
create mode 100644 fse/models/pooling_inner.pyx
diff --git a/fse/models/average_inner.c b/fse/models/average_inner.c
new file mode 100644
index 0000000..91211b6
--- /dev/null
+++ b/fse/models/average_inner.c
@@ -0,0 +1,10722 @@
+/* Generated by Cython 0.29.14 */
+
+/* BEGIN: Cython Metadata
+{
+ "distutils": {
+ "depends": [
+ "voidptr.h"
+ ],
+ "extra_compile_args": [
+ "-O2",
+ "-march=native"
+ ],
+ "name": "average_inner",
+ "sources": [
+ "average_inner.pyx"
+ ]
+ },
+ "module_name": "average_inner"
+}
+END: Cython Metadata */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+#ifndef Py_PYTHON_H
+ #error Python headers needed to compile C extensions, please install development version of Python.
+#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
+ #error Cython requires Python 2.6+ or Python 3.3+.
+#else
+#define CYTHON_ABI "0_29_14"
+#define CYTHON_HEX_VERSION 0x001D0EF0
+#define CYTHON_FUTURE_DIVISION 0
+#include <stddef.h>
+#ifndef offsetof
+ #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+ #ifndef __stdcall
+ #define __stdcall
+ #endif
+ #ifndef __cdecl
+ #define __cdecl
+ #endif
+ #ifndef __fastcall
+ #define __fastcall
+ #endif
+#endif
+#ifndef DL_IMPORT
+ #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+ #define DL_EXPORT(t) t
+#endif
+#define __PYX_COMMA ,
+#ifndef HAVE_LONG_LONG
+ #if PY_VERSION_HEX >= 0x02070000
+ #define HAVE_LONG_LONG
+ #endif
+#endif
+#ifndef PY_LONG_LONG
+ #define PY_LONG_LONG LONG_LONG
+#endif
+#ifndef Py_HUGE_VAL
+ #define Py_HUGE_VAL HUGE_VAL
+#endif
+#ifdef PYPY_VERSION
+ #define CYTHON_COMPILING_IN_PYPY 1
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #undef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 0
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #if PY_VERSION_HEX < 0x03050000
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #undef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 1
+ #undef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 0
+ #undef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 0
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#elif defined(PYSTON_VERSION)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 1
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#else
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 1
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
+ #endif
+ #if PY_MAJOR_VERSION < 3
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #elif !defined(CYTHON_USE_PYLONG_INTERNALS)
+ #define CYTHON_USE_PYLONG_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #if PY_VERSION_HEX < 0x030300F0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #elif !defined(CYTHON_USE_UNICODE_WRITER)
+ #define CYTHON_USE_UNICODE_WRITER 1
+ #endif
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #ifndef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 1
+ #endif
+ #ifndef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 1
+ #endif
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
+ #endif
+ #ifndef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
+ #endif
+ #ifndef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
+ #endif
+#endif
+#if !defined(CYTHON_FAST_PYCCALL)
+#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
+#endif
+#if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #undef SHIFT
+ #undef BASE
+ #undef MASK
+ #ifdef SIZEOF_VOID_P
+ enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
+ #endif
+#endif
+#ifndef __has_attribute
+ #define __has_attribute(x) 0
+#endif
+#ifndef __has_cpp_attribute
+ #define __has_cpp_attribute(x) 0
+#endif
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+# if defined(__cplusplus)
+  template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+# else
+# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+# endif
+#endif
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+# define CYTHON_NCP_UNUSED
+# else
+# define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+#ifdef _MSC_VER
+ #ifndef _MSC_STDINT_H_
+ #if _MSC_VER < 1300
+ typedef unsigned char uint8_t;
+ typedef unsigned int uint32_t;
+ #else
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int32 uint32_t;
+ #endif
+ #endif
+#else
+    #include <stdint.h>
+#endif
+#ifndef CYTHON_FALLTHROUGH
+ #if defined(__cplusplus) && __cplusplus >= 201103L
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #elif __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
+ #endif
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+ #else
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+ #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if __apple_build_version__ < 7000000
+ #undef CYTHON_FALLTHROUGH
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+#endif
+
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #elif defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+
+#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+ #define Py_OptimizeFlag 0
+#endif
+#define __PYX_BUILD_PY_SSIZE_T "n"
+#define CYTHON_FORMAT_SSIZE_T "z"
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyClass_Type
+#else
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#else
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#endif
+ #define __Pyx_DefaultClassType PyType_Type
+#endif
+#ifndef Py_TPFLAGS_CHECKTYPES
+ #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_FINALIZE
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+#ifndef METH_STACKLESS
+ #define METH_STACKLESS 0
+#endif
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+ #ifndef METH_FASTCALL
+ #define METH_FASTCALL 0x80
+ #endif
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames);
+#else
+ #define __Pyx_PyCFunctionFast _PyCFunctionFast
+ #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+#endif
+#if CYTHON_FAST_PYCCALL
+#define __Pyx_PyFastCFunction_Check(func)\
+ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
+#else
+#define __Pyx_PyFastCFunction_Check(func) 0
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
+ #define PyObject_Malloc(s) PyMem_Malloc(s)
+ #define PyObject_Free(p) PyMem_Free(p)
+ #define PyObject_Realloc(p) PyMem_Realloc(p)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
+ #define PyMem_RawMalloc(n) PyMem_Malloc(n)
+ #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
+ #define PyMem_RawFree(p) PyMem_Free(p)
+#endif
+#if CYTHON_COMPILING_IN_PYSTON
+ #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
+#else
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
+#endif
+#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#elif PY_VERSION_HEX >= 0x03060000
+ #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
+#elif PY_VERSION_HEX >= 0x03000000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#else
+ #define __Pyx_PyThreadState_Current _PyThreadState_Current
+#endif
+#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
+#include "pythread.h"
+#define Py_tss_NEEDS_INIT 0
+typedef int Py_tss_t;
+static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
+ *key = PyThread_create_key();
+ return 0;
+}
+static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
+ Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
+ *key = Py_tss_NEEDS_INIT;
+ return key;
+}
+static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
+ PyObject_Free(key);
+}
+static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
+ return *key != Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
+ PyThread_delete_key(*key);
+ *key = Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
+ return PyThread_set_key_value(*key, value);
+}
+static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ return PyThread_get_key_value(*key);
+}
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
+#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+#else
+#define __Pyx_PyDict_NewPresized(n) PyDict_New()
+#endif
+#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
+#else
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
+#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+#else
+#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
+#endif
+#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
+ #define CYTHON_PEP393_ENABLED 1
+ #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
+ 0 : _PyUnicode_Ready((PyObject *)(op)))
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
+ #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
+ #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
+ #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
+#else
+ #define CYTHON_PEP393_ENABLED 0
+ #define PyUnicode_1BYTE_KIND 1
+ #define PyUnicode_2BYTE_KIND 2
+ #define PyUnicode_4BYTE_KIND 4
+ #define __Pyx_PyUnicode_READY(op) (0)
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
+ #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
+ #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
+#endif
+#if CYTHON_COMPILING_IN_PYPY
+ #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
+#else
+ #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
+ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
+ #define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
+ #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
+ #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
+#endif
+#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
+#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
+#else
+ #define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
+#endif
+#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
+ #define PyObject_ASCII(o) PyObject_Repr(o)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBaseString_Type PyUnicode_Type
+ #define PyStringObject PyUnicodeObject
+ #define PyString_Type PyUnicode_Type
+ #define PyString_Check PyUnicode_Check
+ #define PyString_CheckExact PyUnicode_CheckExact
+ #define PyObject_Unicode PyObject_Str
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
+ #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
+#else
+ #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
+ #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
+#endif
+#ifndef PySet_CheckExact
+ #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
+#endif
+#if CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
+#else
+ #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyIntObject PyLongObject
+ #define PyInt_Type PyLong_Type
+ #define PyInt_Check(op) PyLong_Check(op)
+ #define PyInt_CheckExact(op) PyLong_CheckExact(op)
+ #define PyInt_FromString PyLong_FromString
+ #define PyInt_FromUnicode PyLong_FromUnicode
+ #define PyInt_FromLong PyLong_FromLong
+ #define PyInt_FromSize_t PyLong_FromSize_t
+ #define PyInt_FromSsize_t PyLong_FromSsize_t
+ #define PyInt_AsLong PyLong_AsLong
+ #define PyInt_AS_LONG PyLong_AS_LONG
+ #define PyInt_AsSsize_t PyLong_AsSsize_t
+ #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
+ #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+ #define PyNumber_Int PyNumber_Long
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBoolObject PyLongObject
+#endif
+#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
+ #ifndef PyUnicode_InternFromString
+ #define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
+ #endif
+#endif
+#if PY_VERSION_HEX < 0x030200A4
+ typedef long Py_hash_t;
+ #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+ #define __Pyx_PyInt_AsHash_t PyInt_AsLong
+#else
+ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+ #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
+#else
+ #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
+#endif
+#if CYTHON_USE_ASYNC_SLOTS
+ #if PY_VERSION_HEX >= 0x030500B1
+ #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
+ #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
+ #else
+ #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+ #endif
+#else
+ #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+#ifndef __Pyx_PyAsyncMethodsStruct
+ typedef struct {
+ unaryfunc am_await;
+ unaryfunc am_aiter;
+ unaryfunc am_anext;
+ } __Pyx_PyAsyncMethodsStruct;
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+ #define _USE_MATH_DEFINES
+#endif
+#include <math.h>
+#ifdef NAN
+#define __PYX_NAN() ((float) NAN)
+#else
+static CYTHON_INLINE float __PYX_NAN() {
+ float value;
+ memset(&value, 0xFF, sizeof(value));
+ return value;
+}
+#endif
+#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
+#define __Pyx_truncl trunc
+#else
+#define __Pyx_truncl truncl
+#endif
+
+
+#define __PYX_ERR(f_index, lineno, Ln_error) \
+{ \
+ __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
+}
+
+#ifndef __PYX_EXTERN_C
+ #ifdef __cplusplus
+ #define __PYX_EXTERN_C extern "C"
+ #else
+ #define __PYX_EXTERN_C extern
+ #endif
+#endif
+
+#define __PYX_HAVE__average_inner
+#define __PYX_HAVE_API__average_inner
+/* Early includes */
+#include
+#include
+#include "numpy/arrayobject.h"
+#include "numpy/ufuncobject.h"
+#include "voidptr.h"
+#ifdef _OPENMP
+#include <omp.h>
+#endif /* _OPENMP */
+
+#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
+ const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
+
+#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
+#define __PYX_DEFAULT_STRING_ENCODING ""
+#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
+#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#define __Pyx_uchar_cast(c) ((unsigned char)c)
+#define __Pyx_long_cast(x) ((long)x)
+#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
+ (sizeof(type) < sizeof(Py_ssize_t)) ||\
+ (sizeof(type) > sizeof(Py_ssize_t) &&\
+ likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX) &&\
+ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
+ v == (type)PY_SSIZE_T_MIN))) ||\
+ (sizeof(type) == sizeof(Py_ssize_t) &&\
+ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX))) )
+static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
+ return (size_t) i < (size_t) limit;
+}
+#if defined (__cplusplus) && __cplusplus >= 201103L
+  #include <cstdlib>
+ #define __Pyx_sst_abs(value) std::abs(value)
+#elif SIZEOF_INT >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) abs(value)
+#elif SIZEOF_LONG >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) labs(value)
+#elif defined (_MSC_VER)
+ #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
+#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define __Pyx_sst_abs(value) llabs(value)
+#elif defined (__GNUC__)
+ #define __Pyx_sst_abs(value) __builtin_llabs(value)
+#else
+ #define __Pyx_sst_abs(value) ((value<0) ? -value : value)
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
+#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
+#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
+#define __Pyx_PyBytes_FromString PyBytes_FromString
+#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#else
+ #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
+#endif
+#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
+#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
+#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
+#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
+#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
+static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
+ const Py_UNICODE *u_end = u;
+ while (*u_end++) ;
+ return (size_t)(u_end - u - 1);
+}
+#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
+#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
+#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
+#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
+#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
+static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
+#define __Pyx_PySequence_Tuple(obj)\
+ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+#if CYTHON_ASSUME_SAFE_MACROS
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+#else
+#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
+#endif
+#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
+#else
+#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
+#endif
+#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ const char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ if (strcmp(default_encoding_c, "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (!ascii_chars_u) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ Py_DECREF(ascii_chars_u);
+ Py_DECREF(ascii_chars_b);
+ }
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+static char* __PYX_DEFAULT_STRING_ENCODING;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
+
+
+/* Test for GCC > 2.95 */
+#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* !__GNUC__ or GCC < 2.95 */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
+
+static PyObject *__pyx_m = NULL;
+static PyObject *__pyx_d;
+static PyObject *__pyx_b;
+static PyObject *__pyx_cython_runtime = NULL;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static PyObject *__pyx_empty_unicode;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+/* Header.proto */
+#if !defined(CYTHON_CCOMPLEX)
+ #if defined(__cplusplus)
+ #define CYTHON_CCOMPLEX 1
+ #elif defined(_Complex_I)
+ #define CYTHON_CCOMPLEX 1
+ #else
+ #define CYTHON_CCOMPLEX 0
+ #endif
+#endif
+#if CYTHON_CCOMPLEX
+ #ifdef __cplusplus
+    #include <complex>
+ #else
+    #include <complex.h>
+ #endif
+#endif
+#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__)
+ #undef _Complex_I
+ #define _Complex_I 1.0fj
+#endif
+
+
+static const char *__pyx_f[] = {
+ "average_inner.pyx",
+ "__init__.pxd",
+ "type.pxd",
+};
+/* NoFastGil.proto */
+#define __Pyx_PyGILState_Ensure PyGILState_Ensure
+#define __Pyx_PyGILState_Release PyGILState_Release
+#define __Pyx_FastGIL_Remember()
+#define __Pyx_FastGIL_Forget()
+#define __Pyx_FastGilFuncInit()
+
+/* ForceInitThreads.proto */
+#ifndef __PYX_FORCE_INIT_THREADS
+ #define __PYX_FORCE_INIT_THREADS 0
+#endif
+
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":776
+ * # in Cython to enable them only on the right systems.
+ *
+ * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<<
+ * ctypedef npy_int16 int16_t
+ * ctypedef npy_int32 int32_t
+ */
+typedef npy_int8 __pyx_t_5numpy_int8_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":777
+ *
+ * ctypedef npy_int8 int8_t
+ * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<<
+ * ctypedef npy_int32 int32_t
+ * ctypedef npy_int64 int64_t
+ */
+typedef npy_int16 __pyx_t_5numpy_int16_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":778
+ * ctypedef npy_int8 int8_t
+ * ctypedef npy_int16 int16_t
+ * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<<
+ * ctypedef npy_int64 int64_t
+ * #ctypedef npy_int96 int96_t
+ */
+typedef npy_int32 __pyx_t_5numpy_int32_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":779
+ * ctypedef npy_int16 int16_t
+ * ctypedef npy_int32 int32_t
+ * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<<
+ * #ctypedef npy_int96 int96_t
+ * #ctypedef npy_int128 int128_t
+ */
+typedef npy_int64 __pyx_t_5numpy_int64_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":783
+ * #ctypedef npy_int128 int128_t
+ *
+ * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<<
+ * ctypedef npy_uint16 uint16_t
+ * ctypedef npy_uint32 uint32_t
+ */
+typedef npy_uint8 __pyx_t_5numpy_uint8_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":784
+ *
+ * ctypedef npy_uint8 uint8_t
+ * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<<
+ * ctypedef npy_uint32 uint32_t
+ * ctypedef npy_uint64 uint64_t
+ */
+typedef npy_uint16 __pyx_t_5numpy_uint16_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":785
+ * ctypedef npy_uint8 uint8_t
+ * ctypedef npy_uint16 uint16_t
+ * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<<
+ * ctypedef npy_uint64 uint64_t
+ * #ctypedef npy_uint96 uint96_t
+ */
+typedef npy_uint32 __pyx_t_5numpy_uint32_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":786
+ * ctypedef npy_uint16 uint16_t
+ * ctypedef npy_uint32 uint32_t
+ * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<<
+ * #ctypedef npy_uint96 uint96_t
+ * #ctypedef npy_uint128 uint128_t
+ */
+typedef npy_uint64 __pyx_t_5numpy_uint64_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":790
+ * #ctypedef npy_uint128 uint128_t
+ *
+ * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<<
+ * ctypedef npy_float64 float64_t
+ * #ctypedef npy_float80 float80_t
+ */
+typedef npy_float32 __pyx_t_5numpy_float32_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":791
+ *
+ * ctypedef npy_float32 float32_t
+ * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<<
+ * #ctypedef npy_float80 float80_t
+ * #ctypedef npy_float128 float128_t
+ */
+typedef npy_float64 __pyx_t_5numpy_float64_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":800
+ * # The int types are mapped a bit surprising --
+ * # numpy.int corresponds to 'l' and numpy.long to 'q'
+ * ctypedef npy_long int_t # <<<<<<<<<<<<<<
+ * ctypedef npy_longlong long_t
+ * ctypedef npy_longlong longlong_t
+ */
+typedef npy_long __pyx_t_5numpy_int_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":801
+ * # numpy.int corresponds to 'l' and numpy.long to 'q'
+ * ctypedef npy_long int_t
+ * ctypedef npy_longlong long_t # <<<<<<<<<<<<<<
+ * ctypedef npy_longlong longlong_t
+ *
+ */
+typedef npy_longlong __pyx_t_5numpy_long_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":802
+ * ctypedef npy_long int_t
+ * ctypedef npy_longlong long_t
+ * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<<
+ *
+ * ctypedef npy_ulong uint_t
+ */
+typedef npy_longlong __pyx_t_5numpy_longlong_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":804
+ * ctypedef npy_longlong longlong_t
+ *
+ * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<<
+ * ctypedef npy_ulonglong ulong_t
+ * ctypedef npy_ulonglong ulonglong_t
+ */
+typedef npy_ulong __pyx_t_5numpy_uint_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":805
+ *
+ * ctypedef npy_ulong uint_t
+ * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<<
+ * ctypedef npy_ulonglong ulonglong_t
+ *
+ */
+typedef npy_ulonglong __pyx_t_5numpy_ulong_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":806
+ * ctypedef npy_ulong uint_t
+ * ctypedef npy_ulonglong ulong_t
+ * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<<
+ *
+ * ctypedef npy_intp intp_t
+ */
+typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":808
+ * ctypedef npy_ulonglong ulonglong_t
+ *
+ * ctypedef npy_intp intp_t # <<<<<<<<<<<<<<
+ * ctypedef npy_uintp uintp_t
+ *
+ */
+typedef npy_intp __pyx_t_5numpy_intp_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":809
+ *
+ * ctypedef npy_intp intp_t
+ * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<<
+ *
+ * ctypedef npy_double float_t
+ */
+typedef npy_uintp __pyx_t_5numpy_uintp_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":811
+ * ctypedef npy_uintp uintp_t
+ *
+ * ctypedef npy_double float_t # <<<<<<<<<<<<<<
+ * ctypedef npy_double double_t
+ * ctypedef npy_longdouble longdouble_t
+ */
+typedef npy_double __pyx_t_5numpy_float_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":812
+ *
+ * ctypedef npy_double float_t
+ * ctypedef npy_double double_t # <<<<<<<<<<<<<<
+ * ctypedef npy_longdouble longdouble_t
+ *
+ */
+typedef npy_double __pyx_t_5numpy_double_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":813
+ * ctypedef npy_double float_t
+ * ctypedef npy_double double_t
+ * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<<
+ *
+ * ctypedef npy_cfloat cfloat_t
+ */
+typedef npy_longdouble __pyx_t_5numpy_longdouble_t;
+
+/* "average_inner.pxd":15
+ * void* PyCObject_AsVoidPtr(object obj)
+ *
+ * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<<
+ * ctypedef np.uint32_t uINT_t
+ *
+ */
+typedef __pyx_t_5numpy_float32_t __pyx_t_13average_inner_REAL_t;
+
+/* "average_inner.pxd":16
+ *
+ * ctypedef np.float32_t REAL_t
+ * ctypedef np.uint32_t uINT_t # <<<<<<<<<<<<<<
+ *
+ * # BLAS routine signatures
+ */
+typedef __pyx_t_5numpy_uint32_t __pyx_t_13average_inner_uINT_t;
+/* Declarations.proto */
+#if CYTHON_CCOMPLEX
+ #ifdef __cplusplus
+ typedef ::std::complex< float > __pyx_t_float_complex;
+ #else
+ typedef float _Complex __pyx_t_float_complex;
+ #endif
+#else
+ typedef struct { float real, imag; } __pyx_t_float_complex;
+#endif
+static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float);
+
+/* Declarations.proto */
+#if CYTHON_CCOMPLEX
+ #ifdef __cplusplus
+ typedef ::std::complex< double > __pyx_t_double_complex;
+ #else
+ typedef double _Complex __pyx_t_double_complex;
+ #endif
+#else
+ typedef struct { double real, imag; } __pyx_t_double_complex;
+#endif
+static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double);
+
+
+/*--- Type declarations ---*/
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":815
+ * ctypedef npy_longdouble longdouble_t
+ *
+ * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<<
+ * ctypedef npy_cdouble cdouble_t
+ * ctypedef npy_clongdouble clongdouble_t
+ */
+typedef npy_cfloat __pyx_t_5numpy_cfloat_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":816
+ *
+ * ctypedef npy_cfloat cfloat_t
+ * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<<
+ * ctypedef npy_clongdouble clongdouble_t
+ *
+ */
+typedef npy_cdouble __pyx_t_5numpy_cdouble_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":817
+ * ctypedef npy_cfloat cfloat_t
+ * ctypedef npy_cdouble cdouble_t
+ * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<<
+ *
+ * ctypedef npy_cdouble complex_t
+ */
+typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t;
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":819
+ * ctypedef npy_clongdouble clongdouble_t
+ *
+ * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<<
+ *
+ * cdef inline object PyArray_MultiIterNew1(a):
+ */
+typedef npy_cdouble __pyx_t_5numpy_complex_t;
+struct __pyx_t_13average_inner_BaseSentenceVecsConfig;
+struct __pyx_t_13average_inner_FTSentenceVecsConfig;
+
+/* "average_inner.pxd":19
+ *
+ * # BLAS routine signatures
+ * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<<
+ * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil
+ *
+ */
+typedef void (*__pyx_t_13average_inner_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *);
+
+/* "average_inner.pxd":20
+ * # BLAS routine signatures
+ * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil
+ * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil # <<<<<<<<<<<<<<
+ *
+ * cdef saxpy_ptr saxpy
+ */
+typedef void (*__pyx_t_13average_inner_sscal_ptr)(int const *, float const *, float const *, int const *);
+
+/* "average_inner.pxd":34
+ * DEF MAX_NGRAMS = 40
+ *
+ * cdef struct BaseSentenceVecsConfig: # <<<<<<<<<<<<<<
+ * int size, workers
+ *
+ */
+struct __pyx_t_13average_inner_BaseSentenceVecsConfig {
+ int size;
+ int workers;
+ __pyx_t_13average_inner_REAL_t *mem;
+ __pyx_t_13average_inner_REAL_t *mem2;
+ __pyx_t_13average_inner_REAL_t *word_vectors;
+ __pyx_t_13average_inner_REAL_t *word_weights;
+ __pyx_t_13average_inner_REAL_t *sentence_vectors;
+ __pyx_t_13average_inner_uINT_t word_indices[0x2710];
+ __pyx_t_13average_inner_uINT_t sent_adresses[0x2710];
+ __pyx_t_13average_inner_uINT_t sentence_boundary[(0x2710 + 1)];
+};
+
+/* "average_inner.pxd":48
+ * uINT_t sentence_boundary[MAX_WORDS + 1]
+ *
+ * cdef struct FTSentenceVecsConfig: # <<<<<<<<<<<<<<
+ * int size, workers, min_n, max_n, bucket
+ *
+ */
+struct __pyx_t_13average_inner_FTSentenceVecsConfig {
+ int size;
+ int workers;
+ int min_n;
+ int max_n;
+ int bucket;
+ __pyx_t_13average_inner_REAL_t oov_weight;
+ __pyx_t_13average_inner_REAL_t *mem;
+ __pyx_t_13average_inner_REAL_t *mem2;
+ __pyx_t_13average_inner_REAL_t *word_vectors;
+ __pyx_t_13average_inner_REAL_t *ngram_vectors;
+ __pyx_t_13average_inner_REAL_t *word_weights;
+ __pyx_t_13average_inner_REAL_t *sentence_vectors;
+ __pyx_t_13average_inner_uINT_t word_indices[0x2710];
+ __pyx_t_13average_inner_uINT_t sent_adresses[0x2710];
+ __pyx_t_13average_inner_uINT_t sentence_boundary[(0x2710 + 1)];
+ __pyx_t_13average_inner_uINT_t subwords_idx_len[0x2710];
+ __pyx_t_13average_inner_uINT_t *subwords_idx;
+};
+
+/* --- Runtime support code (head) --- */
+/* Refnanny.proto */
+#ifndef CYTHON_REFNANNY
+ #define CYTHON_REFNANNY 0
+#endif
+#if CYTHON_REFNANNY
+ typedef struct {
+ void (*INCREF)(void*, PyObject*, int);
+ void (*DECREF)(void*, PyObject*, int);
+ void (*GOTREF)(void*, PyObject*, int);
+ void (*GIVEREF)(void*, PyObject*, int);
+ void* (*SetupContext)(const char*, int, const char*);
+ void (*FinishContext)(void**);
+ } __Pyx_RefNannyAPIStruct;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
+ #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+#ifdef WITH_THREAD
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ if (acquire_gil) {\
+ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ PyGILState_Release(__pyx_gilstate_save);\
+ } else {\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ }
+#else
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+#endif
+ #define __Pyx_RefNannyFinishContext()\
+ __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+ #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+ #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+ #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+ #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+ #define __Pyx_RefNannyDeclarations
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)
+ #define __Pyx_RefNannyFinishContext()
+ #define __Pyx_INCREF(r) Py_INCREF(r)
+ #define __Pyx_DECREF(r) Py_DECREF(r)
+ #define __Pyx_GOTREF(r)
+ #define __Pyx_GIVEREF(r)
+ #define __Pyx_XINCREF(r) Py_XINCREF(r)
+ #define __Pyx_XDECREF(r) Py_XDECREF(r)
+ #define __Pyx_XGOTREF(r)
+ #define __Pyx_XGIVEREF(r)
+#endif
+#define __Pyx_XDECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_XDECREF(tmp);\
+ } while (0)
+#define __Pyx_DECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_DECREF(tmp);\
+ } while (0)
+#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
+#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
+
+/* PyObjectGetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/* GetBuiltinName.proto */
+static PyObject *__Pyx_GetBuiltinName(PyObject *name);
+
+/* GetItemInt.proto */
+#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
+ __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\
+ (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\
+ __Pyx_GetItemInt_Generic(o, to_py_func(i))))
+#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
+ __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\
+ (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL))
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck);
+#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
+ __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\
+ (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL))
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck);
+static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j);
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
+ int is_list, int wraparound, int boundscheck);
+
+/* ExtTypeTest.proto */
+static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type);
+
+/* PyDictVersioning.proto */
+#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
+#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
+#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
+ (version_var) = __PYX_GET_DICT_VERSION(dict);\
+ (cache_var) = (value);
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
+ static PY_UINT64_T __pyx_dict_version = 0;\
+ static PyObject *__pyx_dict_cached_value = NULL;\
+ if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
+ (VAR) = __pyx_dict_cached_value;\
+ } else {\
+ (VAR) = __pyx_dict_cached_value = (LOOKUP);\
+ __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
+ }\
+}
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
+static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
+#else
+#define __PYX_GET_DICT_VERSION(dict) (0)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
+#endif
+
+/* GetModuleGlobalName.proto */
+#if CYTHON_USE_DICT_VERSIONS
+#define __Pyx_GetModuleGlobalName(var, name) {\
+ static PY_UINT64_T __pyx_dict_version = 0;\
+ static PyObject *__pyx_dict_cached_value = NULL;\
+ (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\
+ (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\
+ __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
+}
+#define __Pyx_GetModuleGlobalNameUncached(var, name) {\
+ PY_UINT64_T __pyx_dict_version;\
+ PyObject *__pyx_dict_cached_value;\
+ (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
+}
+static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value);
+#else
+#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name)
+#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name)
+static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name);
+#endif
+
+/* PyCFunctionFastCall.proto */
+#if CYTHON_FAST_PYCCALL
+static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs);
+#else
+#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL)
+#endif
+
+/* PyFunctionFastCall.proto */
+#if CYTHON_FAST_PYCALL
+#define __Pyx_PyFunction_FastCall(func, args, nargs)\
+ __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
+#if 1 || PY_VERSION_HEX < 0x030600B1
+static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
+#else
+#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
+#endif
+#define __Pyx_BUILD_ASSERT_EXPR(cond)\
+ (sizeof(char [1 - 2*!(cond)]) - 1)
+#ifndef Py_MEMBER_SIZE
+#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
+#endif
+ static size_t __pyx_pyframe_localsplus_offset = 0;
+ #include "frameobject.h"
+ #define __Pxy_PyFrame_Initialize_Offsets()\
+ ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\
+ (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
+ #define __Pyx_PyFrame_GetLocalsplus(frame)\
+ (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
+#endif
+
+/* PyObjectCall.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw);
+#else
+#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
+#endif
+
+/* PyObjectCall2Args.proto */
+static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2);
+
+/* PyObjectCallMethO.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg);
+#endif
+
+/* PyObjectCallOneArg.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg);
+
+/* PySequenceContains.proto */
+static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) {
+ int result = PySequence_Contains(seq, item);
+ return unlikely(result < 0) ? result : (result == (eq == Py_EQ));
+}
+
+/* ObjectGetItem.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key);
+#else
+#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key)
+#endif
+
+/* ListCompAppend.proto */
+#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS
+static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) {
+ PyListObject* L = (PyListObject*) list;
+ Py_ssize_t len = Py_SIZE(list);
+ if (likely(L->allocated > len)) {
+ Py_INCREF(x);
+ PyList_SET_ITEM(list, len, x);
+ Py_SIZE(list) = len+1;
+ return 0;
+ }
+ return PyList_Append(list, x);
+}
+#else
+#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x)
+#endif
+
+/* SliceTupleAndList.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyList_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop);
+static CYTHON_INLINE PyObject* __Pyx_PyTuple_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop);
+#else
+#define __Pyx_PyList_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop)
+#define __Pyx_PyTuple_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop)
+#endif
+
+/* PyIntBinop.proto */
+#if !CYTHON_COMPILING_IN_PYPY
+static PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check);
+#else
+#define __Pyx_PyInt_AddObjC(op1, op2, intval, inplace, zerodivision_check)\
+ (inplace ? PyNumber_InPlaceAdd(op1, op2) : PyNumber_Add(op1, op2))
+#endif
+
+/* RaiseArgTupleInvalid.proto */
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+ Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
+
+/* RaiseDoubleKeywords.proto */
+static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
+
+/* ParseKeywords.proto */
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
+ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
+ const char* function_name);
+
+/* RaiseTooManyValuesToUnpack.proto */
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
+
+/* RaiseNeedMoreValuesToUnpack.proto */
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
+
+/* IterFinish.proto */
+static CYTHON_INLINE int __Pyx_IterFinish(void);
+
+/* UnpackItemEndCheck.proto */
+static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected);
+
+/* PyThreadStateGet.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
+#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
+#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
+#else
+#define __Pyx_PyThreadState_declare
+#define __Pyx_PyThreadState_assign
+#define __Pyx_PyErr_Occurred() PyErr_Occurred()
+#endif
+
+/* PyErrFetchRestore.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#else
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
+#endif
+
+/* RaiseException.proto */
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause);
+
+/* DictGetItem.proto */
+#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
+static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);
+#define __Pyx_PyObject_Dict_GetItem(obj, name)\
+ (likely(PyDict_CheckExact(obj)) ?\
+ __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name))
+#else
+#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key)
+#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name)
+#endif
+
+/* RaiseNoneIterError.proto */
+static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void);
+
+/* GetTopmostException.proto */
+#if CYTHON_USE_EXC_INFO_STACK
+static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate);
+#endif
+
+/* SaveResetException.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+#else
+#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb)
+#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb)
+#endif
+
+/* PyErrExceptionMatches.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err)
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);
+#else
+#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err)
+#endif
+
+/* GetException.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb)
+static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#else
+static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb);
+#endif
+
+/* TypeImport.proto */
+#ifndef __PYX_HAVE_RT_ImportType_proto
+#define __PYX_HAVE_RT_ImportType_proto
+enum __Pyx_ImportType_CheckSize {
+ __Pyx_ImportType_CheckSize_Error = 0,
+ __Pyx_ImportType_CheckSize_Warn = 1,
+ __Pyx_ImportType_CheckSize_Ignore = 2
+};
+static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size);
+#endif
+
+/* Import.proto */
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);
+
+/* ImportFrom.proto */
+static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name);
+
+/* PyObjectCallNoArg.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func);
+#else
+#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL)
+#endif
+
+/* CLineInTraceback.proto */
+#ifdef CYTHON_CLINE_IN_TRACEBACK
+#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
+#else
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
+#endif
+
+/* CodeObjectCache.proto */
+typedef struct {
+ PyCodeObject* code_object;
+ int code_line;
+} __Pyx_CodeObjectCacheEntry;
+struct __Pyx_CodeObjectCache {
+ int count;
+ int max_count;
+ __Pyx_CodeObjectCacheEntry* entries;
+};
+static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
+static PyCodeObject *__pyx_find_code_object(int code_line);
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
+
+/* AddTraceback.proto */
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_npy_uint32(npy_uint32 value);
+
+/* RealImag.proto */
+#if CYTHON_CCOMPLEX
+ #ifdef __cplusplus
+ #define __Pyx_CREAL(z) ((z).real())
+ #define __Pyx_CIMAG(z) ((z).imag())
+ #else
+ #define __Pyx_CREAL(z) (__real__(z))
+ #define __Pyx_CIMAG(z) (__imag__(z))
+ #endif
+#else
+ #define __Pyx_CREAL(z) ((z).real)
+ #define __Pyx_CIMAG(z) ((z).imag)
+#endif
+#if defined(__cplusplus) && CYTHON_CCOMPLEX\
+ && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103)
+ #define __Pyx_SET_CREAL(z,x) ((z).real(x))
+ #define __Pyx_SET_CIMAG(z,y) ((z).imag(y))
+#else
+ #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x)
+ #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y)
+#endif
+
+/* Arithmetic.proto */
+#if CYTHON_CCOMPLEX
+ #define __Pyx_c_eq_float(a, b) ((a)==(b))
+ #define __Pyx_c_sum_float(a, b) ((a)+(b))
+ #define __Pyx_c_diff_float(a, b) ((a)-(b))
+ #define __Pyx_c_prod_float(a, b) ((a)*(b))
+ #define __Pyx_c_quot_float(a, b) ((a)/(b))
+ #define __Pyx_c_neg_float(a) (-(a))
+ #ifdef __cplusplus
+ #define __Pyx_c_is_zero_float(z) ((z)==(float)0)
+ #define __Pyx_c_conj_float(z) (::std::conj(z))
+ #if 1
+ #define __Pyx_c_abs_float(z) (::std::abs(z))
+ #define __Pyx_c_pow_float(a, b) (::std::pow(a, b))
+ #endif
+ #else
+ #define __Pyx_c_is_zero_float(z) ((z)==0)
+ #define __Pyx_c_conj_float(z) (conjf(z))
+ #if 1
+ #define __Pyx_c_abs_float(z) (cabsf(z))
+ #define __Pyx_c_pow_float(a, b) (cpowf(a, b))
+ #endif
+ #endif
+#else
+ static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex, __pyx_t_float_complex);
+ static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex, __pyx_t_float_complex);
+ static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex, __pyx_t_float_complex);
+ static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex, __pyx_t_float_complex);
+ static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex, __pyx_t_float_complex);
+ static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex);
+ static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex);
+ static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex);
+ #if 1
+ static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex);
+ static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex, __pyx_t_float_complex);
+ #endif
+#endif
+
+/* Arithmetic.proto */
+#if CYTHON_CCOMPLEX
+ #define __Pyx_c_eq_double(a, b) ((a)==(b))
+ #define __Pyx_c_sum_double(a, b) ((a)+(b))
+ #define __Pyx_c_diff_double(a, b) ((a)-(b))
+ #define __Pyx_c_prod_double(a, b) ((a)*(b))
+ #define __Pyx_c_quot_double(a, b) ((a)/(b))
+ #define __Pyx_c_neg_double(a) (-(a))
+ #ifdef __cplusplus
+ #define __Pyx_c_is_zero_double(z) ((z)==(double)0)
+ #define __Pyx_c_conj_double(z) (::std::conj(z))
+ #if 1
+ #define __Pyx_c_abs_double(z) (::std::abs(z))
+ #define __Pyx_c_pow_double(a, b) (::std::pow(a, b))
+ #endif
+ #else
+ #define __Pyx_c_is_zero_double(z) ((z)==0)
+ #define __Pyx_c_conj_double(z) (conj(z))
+ #if 1
+ #define __Pyx_c_abs_double(z) (cabs(z))
+ #define __Pyx_c_pow_double(a, b) (cpow(a, b))
+ #endif
+ #endif
+#else
+ static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex, __pyx_t_double_complex);
+ static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex, __pyx_t_double_complex);
+ static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex, __pyx_t_double_complex);
+ static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex, __pyx_t_double_complex);
+ static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex, __pyx_t_double_complex);
+ static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex);
+ static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex);
+ static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex);
+ #if 1
+ static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex);
+ static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex, __pyx_t_double_complex);
+ #endif
+#endif
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE npy_uint32 __Pyx_PyInt_As_npy_uint32(PyObject *);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
+
+/* FastTypeChecks.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
+#else
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
+#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
+#endif
+#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
+
+/* CheckBinaryVersion.proto */
+static int __Pyx_check_binary_version(void);
+
+/* PyObjectSetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL)
+static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value);
+#else
+#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n)
+#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v)
+#endif
+
+/* VoidPtrExport.proto */
+static int __Pyx_ExportVoidPtr(PyObject *name, void *p, const char *sig);
+
+/* FunctionExport.proto */
+static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig);
+
+/* InitStrings.proto */
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
+
+
+/* Module declarations from 'cpython.buffer' */
+
+/* Module declarations from 'libc.string' */
+
+/* Module declarations from 'libc.stdio' */
+
+/* Module declarations from '__builtin__' */
+
+/* Module declarations from 'cpython.type' */
+static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0;
+
+/* Module declarations from 'cpython' */
+
+/* Module declarations from 'cpython.object' */
+
+/* Module declarations from 'cpython.ref' */
+
+/* Module declarations from 'cpython.mem' */
+
+/* Module declarations from 'numpy' */
+
+/* Module declarations from 'numpy' */
+static PyTypeObject *__pyx_ptype_5numpy_dtype = 0;
+static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0;
+static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0;
+static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0;
+static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0;
+static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/
+
+/* Module declarations from 'cython' */
+
+/* Module declarations from 'average_inner' */
+static __pyx_t_13average_inner_saxpy_ptr __pyx_v_13average_inner_saxpy;
+static __pyx_t_13average_inner_sscal_ptr __pyx_v_13average_inner_sscal;
+static int __pyx_v_13average_inner_ONE;
+static int __pyx_v_13average_inner_ZERO;
+static __pyx_t_13average_inner_REAL_t __pyx_v_13average_inner_ONEF;
+static __pyx_t_13average_inner_REAL_t __pyx_v_13average_inner_ZEROF;
+static PyObject *__pyx_f_13average_inner_init_base_s2v_config(struct __pyx_t_13average_inner_BaseSentenceVecsConfig *, PyObject *, PyObject *, PyObject *); /*proto*/
+static PyObject *__pyx_f_13average_inner_init_ft_s2v_config(struct __pyx_t_13average_inner_FTSentenceVecsConfig *, PyObject *, PyObject *, PyObject *); /*proto*/
+static PyObject *__pyx_f_13average_inner_populate_base_s2v_config(struct __pyx_t_13average_inner_BaseSentenceVecsConfig *, PyObject *, PyObject *); /*proto*/
+static PyObject *__pyx_f_13average_inner_populate_ft_s2v_config(struct __pyx_t_13average_inner_FTSentenceVecsConfig *, PyObject *, PyObject *); /*proto*/
+static void __pyx_f_13average_inner_compute_base_sentence_averages(struct __pyx_t_13average_inner_BaseSentenceVecsConfig *, __pyx_t_13average_inner_uINT_t); /*proto*/
+static void __pyx_f_13average_inner_compute_ft_sentence_averages(struct __pyx_t_13average_inner_FTSentenceVecsConfig *, __pyx_t_13average_inner_uINT_t); /*proto*/
+#define __Pyx_MODULE_NAME "average_inner"
+extern int __pyx_module_is_main_average_inner;
+int __pyx_module_is_main_average_inner = 0;
+
+/* Implementation of 'average_inner' */
+static PyObject *__pyx_builtin_enumerate;
+static PyObject *__pyx_builtin_range;
+static PyObject *__pyx_builtin_ValueError;
+static PyObject *__pyx_builtin_RuntimeError;
+static PyObject *__pyx_builtin_ImportError;
+static const char __pyx_k__8[] = "*";
+static const char __pyx_k_ft[] = "ft";
+static const char __pyx_k_np[] = "np";
+static const char __pyx_k_sv[] = "sv";
+static const char __pyx_k_wv[] = "wv";
+static const char __pyx_k_ONE[] = "ONE";
+static const char __pyx_k_max[] = "max";
+static const char __pyx_k_w2v[] = "w2v";
+static const char __pyx_k_ONEF[] = "ONEF";
+static const char __pyx_k_ZERO[] = "ZERO";
+static const char __pyx_k_fill[] = "fill";
+static const char __pyx_k_init[] = "init";
+static const char __pyx_k_main[] = "__main__";
+static const char __pyx_k_name[] = "__name__";
+static const char __pyx_k_test[] = "__test__";
+static const char __pyx_k_ZEROF[] = "ZEROF";
+static const char __pyx_k_fblas[] = "fblas";
+static const char __pyx_k_index[] = "index";
+static const char __pyx_k_is_ft[] = "is_ft";
+static const char __pyx_k_max_n[] = "max_n";
+static const char __pyx_k_min_n[] = "min_n";
+static const char __pyx_k_model[] = "model";
+static const char __pyx_k_numpy[] = "numpy";
+static const char __pyx_k_range[] = "range";
+static const char __pyx_k_saxpy[] = "saxpy";
+static const char __pyx_k_sscal[] = "sscal";
+static const char __pyx_k_vocab[] = "vocab";
+static const char __pyx_k_bucket[] = "bucket";
+static const char __pyx_k_import[] = "__import__";
+static const char __pyx_k_memory[] = "memory";
+static const char __pyx_k_target[] = "target";
+static const char __pyx_k_vectors[] = "vectors";
+static const char __pyx_k_workers[] = "workers";
+static const char __pyx_k_cpointer[] = "_cpointer";
+static const char __pyx_k_pyx_capi[] = "__pyx_capi__";
+static const char __pyx_k_eff_words[] = "eff_words";
+static const char __pyx_k_enumerate[] = "enumerate";
+static const char __pyx_k_ValueError[] = "ValueError";
+static const char __pyx_k_ImportError[] = "ImportError";
+static const char __pyx_k_vector_size[] = "vector_size";
+static const char __pyx_k_FAST_VERSION[] = "FAST_VERSION";
+static const char __pyx_k_RuntimeError[] = "RuntimeError";
+static const char __pyx_k_word_weights[] = "word_weights";
+static const char __pyx_k_average_inner[] = "average_inner";
+static const char __pyx_k_eff_sentences[] = "eff_sentences";
+static const char __pyx_k_ft_hash_bytes[] = "ft_hash_bytes";
+static const char __pyx_k_vectors_vocab[] = "vectors_vocab";
+static const char __pyx_k_vectors_ngrams[] = "vectors_ngrams";
+static const char __pyx_k_train_average_cy[] = "train_average_cy";
+static const char __pyx_k_average_inner_pyx[] = "average_inner.pyx";
+static const char __pyx_k_indexed_sentences[] = "indexed_sentences";
+static const char __pyx_k_scipy_linalg_blas[] = "scipy.linalg.blas";
+static const char __pyx_k_MAX_WORDS_IN_BATCH[] = "MAX_WORDS_IN_BATCH";
+static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
+static const char __pyx_k_MAX_NGRAMS_IN_BATCH[] = "MAX_NGRAMS_IN_BATCH";
+static const char __pyx_k_compute_ngrams_bytes[] = "compute_ngrams_bytes";
+static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous";
+static const char __pyx_k_gensim_models__utils_any2vec[] = "gensim.models._utils_any2vec";
+static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import";
+static const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = "unknown dtype code in numpy.pxd (%d)";
+static const char __pyx_k_Format_string_allocated_too_shor[] = "Format string allocated too short, see comment in numpy.pxd";
+static const char __pyx_k_Non_native_byte_order_not_suppor[] = "Non-native byte order not supported";
+static const char __pyx_k_Optimized_cython_functions_for_c[] = "Optimized cython functions for computing sentence embeddings";
+static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous";
+static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import";
+static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short.";
+static PyObject *__pyx_n_s_FAST_VERSION;
+static PyObject *__pyx_kp_u_Format_string_allocated_too_shor;
+static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2;
+static PyObject *__pyx_n_s_ImportError;
+static PyObject *__pyx_n_s_MAX_NGRAMS_IN_BATCH;
+static PyObject *__pyx_n_s_MAX_WORDS_IN_BATCH;
+static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor;
+static PyObject *__pyx_n_s_ONE;
+static PyObject *__pyx_n_s_ONEF;
+static PyObject *__pyx_n_s_RuntimeError;
+static PyObject *__pyx_n_s_ValueError;
+static PyObject *__pyx_n_s_ZERO;
+static PyObject *__pyx_n_s_ZEROF;
+static PyObject *__pyx_n_s__8;
+static PyObject *__pyx_n_s_average_inner;
+static PyObject *__pyx_kp_s_average_inner_pyx;
+static PyObject *__pyx_n_s_bucket;
+static PyObject *__pyx_n_s_cline_in_traceback;
+static PyObject *__pyx_n_s_compute_ngrams_bytes;
+static PyObject *__pyx_n_s_cpointer;
+static PyObject *__pyx_n_s_eff_sentences;
+static PyObject *__pyx_n_s_eff_words;
+static PyObject *__pyx_n_s_enumerate;
+static PyObject *__pyx_n_s_fblas;
+static PyObject *__pyx_n_s_fill;
+static PyObject *__pyx_n_s_ft;
+static PyObject *__pyx_n_s_ft_hash_bytes;
+static PyObject *__pyx_n_s_gensim_models__utils_any2vec;
+static PyObject *__pyx_n_s_import;
+static PyObject *__pyx_n_s_index;
+static PyObject *__pyx_n_s_indexed_sentences;
+static PyObject *__pyx_n_s_init;
+static PyObject *__pyx_n_s_is_ft;
+static PyObject *__pyx_n_s_main;
+static PyObject *__pyx_n_s_max;
+static PyObject *__pyx_n_s_max_n;
+static PyObject *__pyx_n_s_memory;
+static PyObject *__pyx_n_s_min_n;
+static PyObject *__pyx_n_s_model;
+static PyObject *__pyx_n_s_name;
+static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous;
+static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou;
+static PyObject *__pyx_n_s_np;
+static PyObject *__pyx_n_s_numpy;
+static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to;
+static PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor;
+static PyObject *__pyx_n_s_pyx_capi;
+static PyObject *__pyx_n_s_range;
+static PyObject *__pyx_n_s_saxpy;
+static PyObject *__pyx_n_s_scipy_linalg_blas;
+static PyObject *__pyx_n_s_sscal;
+static PyObject *__pyx_n_s_sv;
+static PyObject *__pyx_n_s_target;
+static PyObject *__pyx_n_s_test;
+static PyObject *__pyx_n_s_train_average_cy;
+static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd;
+static PyObject *__pyx_n_s_vector_size;
+static PyObject *__pyx_n_s_vectors;
+static PyObject *__pyx_n_s_vectors_ngrams;
+static PyObject *__pyx_n_s_vectors_vocab;
+static PyObject *__pyx_n_s_vocab;
+static PyObject *__pyx_n_s_w2v;
+static PyObject *__pyx_n_s_word_weights;
+static PyObject *__pyx_n_s_workers;
+static PyObject *__pyx_n_s_wv;
+static PyObject *__pyx_pf_13average_inner_train_average_cy(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_indexed_sentences, PyObject *__pyx_v_target, PyObject *__pyx_v_memory); /* proto */
+static PyObject *__pyx_pf_13average_inner_2init(CYTHON_UNUSED PyObject *__pyx_self); /* proto */
+static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */
+static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */
+static PyObject *__pyx_int_0;
+static PyObject *__pyx_int_1;
+static PyObject *__pyx_int_40;
+static PyObject *__pyx_int_10000;
+static PyObject *__pyx_tuple_;
+static PyObject *__pyx_tuple__2;
+static PyObject *__pyx_tuple__3;
+static PyObject *__pyx_tuple__4;
+static PyObject *__pyx_tuple__5;
+static PyObject *__pyx_tuple__6;
+static PyObject *__pyx_tuple__7;
+static PyObject *__pyx_tuple__9;
+static PyObject *__pyx_codeobj__10;
+static PyObject *__pyx_codeobj__11;
+/* Late includes */
+
+/* "average_inner.pyx":36
+ * DEF MAX_NGRAMS = 40
+ *
+ * cdef init_base_s2v_config(BaseSentenceVecsConfig *c, model, target, memory): # <<<<<<<<<<<<<<
+ * """Load BaseAny2Vec parameters into a BaseSentenceVecsConfig struct.
+ *
+ */
+
+static PyObject *__pyx_f_13average_inner_init_base_s2v_config(struct __pyx_t_13average_inner_BaseSentenceVecsConfig *__pyx_v_c, PyObject *__pyx_v_model, PyObject *__pyx_v_target, PyObject *__pyx_v_memory) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("init_base_s2v_config", 0);
+
+ /* "average_inner.pyx":52
+ *
+ * """
+ * c[0].workers = model.workers # <<<<<<<<<<<<<<
+ * c[0].size = model.sv.vector_size
+ *
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_workers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 52, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ (__pyx_v_c[0]).workers = __pyx_t_2;
+
+ /* "average_inner.pyx":53
+ * """
+ * c[0].workers = model.workers
+ * c[0].size = model.sv.vector_size # <<<<<<<<<<<<<<
+ *
+ * c[0].mem = <REAL_t *>(np.PyArray_DATA(memory[0]))
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_sv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 53, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 53, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ (__pyx_v_c[0]).size = __pyx_t_2;
+
+ /* "average_inner.pyx":55
+ * c[0].size = model.sv.vector_size
+ *
+ * c[0].mem = <REAL_t *>(np.PyArray_DATA(memory[0])) # <<<<<<<<<<<<<<
+ * c[0].mem2 = <REAL_t *>(np.PyArray_DATA(memory[2]))
+ *
+ */
+ __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_memory, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 55, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 55, __pyx_L1_error)
+ (__pyx_v_c[0]).mem = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3)));
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "average_inner.pyx":56
+ *
+ * c[0].mem = <REAL_t *>(np.PyArray_DATA(memory[0]))
+ * c[0].mem2 = <REAL_t *>(np.PyArray_DATA(memory[2])) # <<<<<<<<<<<<<<
+ *
+ * c[0].word_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors))
+ */
+ __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_memory, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 56, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 56, __pyx_L1_error)
+ (__pyx_v_c[0]).mem2 = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3)));
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "average_inner.pyx":58
+ * c[0].mem2 = <REAL_t *>(np.PyArray_DATA(memory[2]))
+ *
+ * c[0].word_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<<
+ * c[0].word_weights = <REAL_t *>(np.PyArray_DATA(model.word_weights))
+ *
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 58, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 58, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 58, __pyx_L1_error)
+ (__pyx_v_c[0]).word_vectors = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1)));
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":59
+ *
+ * c[0].word_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors))
+ * c[0].word_weights = <REAL_t *>(np.PyArray_DATA(model.word_weights)) # <<<<<<<<<<<<<<
+ *
+ * c[0].sentence_vectors = <REAL_t *>(np.PyArray_DATA(target))
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_word_weights); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 59, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 59, __pyx_L1_error)
+ (__pyx_v_c[0]).word_weights = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1)));
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":61
+ * c[0].word_weights = <REAL_t *>(np.PyArray_DATA(model.word_weights))
+ *
+ * c[0].sentence_vectors = <REAL_t *>(np.PyArray_DATA(target)) # <<<<<<<<<<<<<<
+ *
+ * cdef init_ft_s2v_config(FTSentenceVecsConfig *c, model, target, memory):
+ */
+ if (!(likely(((__pyx_v_target) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_target, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 61, __pyx_L1_error)
+ (__pyx_v_c[0]).sentence_vectors = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_target)));
+
+ /* "average_inner.pyx":36
+ * DEF MAX_NGRAMS = 40
+ *
+ * cdef init_base_s2v_config(BaseSentenceVecsConfig *c, model, target, memory): # <<<<<<<<<<<<<<
+ * """Load BaseAny2Vec parameters into a BaseSentenceVecsConfig struct.
+ *
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("average_inner.init_base_s2v_config", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
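+
+/* Usage sketch: init_base_s2v_config is only reached through the module's
+ * Python entry point train_average_cy. A minimal call would look roughly
+ * like the following (the import path fse.models.average_inner is assumed;
+ * the argument and result names are taken from the string table in this
+ * file):
+ *
+ *     from fse.models.average_inner import train_average_cy
+ *
+ *     # target: float32 matrix that receives the sentence vectors
+ *     # memory: per-thread scratch arrays; slots 0 and 2 are read above
+ *     eff_sentences, eff_words = train_average_cy(
+ *         model, indexed_sentences, target, memory)
+ */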
+
+/* "average_inner.pyx":63
+ * c[0].sentence_vectors = <REAL_t *>(np.PyArray_DATA(target))
+ *
+ * cdef init_ft_s2v_config(FTSentenceVecsConfig *c, model, target, memory): # <<<<<<<<<<<<<<
+ * """Load Fasttext parameters into a FTSentenceVecsConfig struct.
+ *
+ */
+
+static PyObject *__pyx_f_13average_inner_init_ft_s2v_config(struct __pyx_t_13average_inner_FTSentenceVecsConfig *__pyx_v_c, PyObject *__pyx_v_model, PyObject *__pyx_v_target, PyObject *__pyx_v_memory) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ __pyx_t_13average_inner_REAL_t __pyx_t_6;
+ __Pyx_RefNannySetupContext("init_ft_s2v_config", 0);
+
+ /* "average_inner.pyx":80
+ * """
+ *
+ * c[0].workers = model.workers # <<<<<<<<<<<<<<
+ * c[0].size = model.sv.vector_size
+ * c[0].min_n = model.wv.min_n
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_workers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 80, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 80, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ (__pyx_v_c[0]).workers = __pyx_t_2;
+
+ /* "average_inner.pyx":81
+ *
+ * c[0].workers = model.workers
+ * c[0].size = model.sv.vector_size # <<<<<<<<<<<<<<
+ * c[0].min_n = model.wv.min_n
+ * c[0].max_n = model.wv.max_n
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_sv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 81, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 81, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ (__pyx_v_c[0]).size = __pyx_t_2;
+
+ /* "average_inner.pyx":82
+ * c[0].workers = model.workers
+ * c[0].size = model.sv.vector_size
+ * c[0].min_n = model.wv.min_n # <<<<<<<<<<<<<<
+ * c[0].max_n = model.wv.max_n
+ * c[0].bucket = model.wv.bucket
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 82, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_min_n); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 82, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 82, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ (__pyx_v_c[0]).min_n = __pyx_t_2;
+
+ /* "average_inner.pyx":83
+ * c[0].size = model.sv.vector_size
+ * c[0].min_n = model.wv.min_n
+ * c[0].max_n = model.wv.max_n # <<<<<<<<<<<<<<
+ * c[0].bucket = model.wv.bucket
+ *
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 83, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_max_n); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 83, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 83, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ (__pyx_v_c[0]).max_n = __pyx_t_2;
+
+ /* "average_inner.pyx":84
+ * c[0].min_n = model.wv.min_n
+ * c[0].max_n = model.wv.max_n
+ * c[0].bucket = model.wv.bucket # <<<<<<<<<<<<<<
+ *
+ * c[0].oov_weight = np.max(model.word_weights)
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 84, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_bucket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 84, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 84, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ (__pyx_v_c[0]).bucket = __pyx_t_2;
+
+ /* "average_inner.pyx":86
+ * c[0].bucket = model.wv.bucket
+ *
+ * c[0].oov_weight = np.max(model.word_weights) # <<<<<<<<<<<<<<
+ *
+ * c[0].mem = <REAL_t *>(np.PyArray_DATA(memory[0]))
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_np); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_max); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_word_weights); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_5 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {
+ __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4);
+ if (likely(__pyx_t_5)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
+ __Pyx_INCREF(__pyx_t_5);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_4, function);
+ }
+ }
+ __pyx_t_1 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __pyx_PyFloat_AsFloat(__pyx_t_1); if (unlikely((__pyx_t_6 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ (__pyx_v_c[0]).oov_weight = ((__pyx_t_13average_inner_REAL_t)__pyx_t_6);
+
+ /* "average_inner.pyx":88
+ * c[0].oov_weight = np.max(model.word_weights)
+ *
+ * c[0].mem = <REAL_t *>(np.PyArray_DATA(memory[0])) # <<<<<<<<<<<<<<
+ * c[0].mem2 = <REAL_t *>(np.PyArray_DATA(memory[2]))
+ *
+ */
+ __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_memory, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 88, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 88, __pyx_L1_error)
+ (__pyx_v_c[0]).mem = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1)));
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":89
+ *
+ * c[0].mem = <REAL_t *>(np.PyArray_DATA(memory[0]))
+ * c[0].mem2 = <REAL_t *>(np.PyArray_DATA(memory[2])) # <<<<<<<<<<<<<<
+ *
+ * memory[1].fill(ZERO) # Reset the ngram storage before filling the struct
+ */
+ __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_memory, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 89, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 89, __pyx_L1_error)
+ (__pyx_v_c[0]).mem2 = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1)));
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":91
+ * c[0].mem2 = <REAL_t *>(np.PyArray_DATA(memory[2]))
+ *
+ * memory[1].fill(ZERO) # Reset the ngram storage before filling the struct # <<<<<<<<<<<<<<
+ * c[0].subwords_idx = <uINT_t *>(np.PyArray_DATA(memory[1]))
+ *
+ */
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_memory, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 91, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_fill); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 91, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_13average_inner_ZERO); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 91, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = NULL;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_5)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_5);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_1 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 91, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":92
+ *
+ * memory[1].fill(ZERO) # Reset the ngram storage before filling the struct
+ * c[0].subwords_idx = <uINT_t *>(np.PyArray_DATA(memory[1])) # <<<<<<<<<<<<<<
+ *
+ * c[0].word_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors_vocab))
+ */
+ __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_memory, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 92, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 92, __pyx_L1_error)
+ (__pyx_v_c[0]).subwords_idx = ((__pyx_t_13average_inner_uINT_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1)));
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":94
+ * c[0].subwords_idx = <uINT_t *>(np.PyArray_DATA(memory[1]))
+ *
+ * c[0].word_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors_vocab)) # <<<<<<<<<<<<<<
+ * c[0].ngram_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors_ngrams))
+ * c[0].word_weights = <REAL_t *>(np.PyArray_DATA(model.word_weights))
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 94, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 94, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 94, __pyx_L1_error)
+ (__pyx_v_c[0]).word_vectors = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3)));
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "average_inner.pyx":95
+ *
+ * c[0].word_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors_vocab))
+ * c[0].ngram_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors_ngrams)) # <<<<<<<<<<<<<<
+ * c[0].word_weights = <REAL_t *>(np.PyArray_DATA(model.word_weights))
+ *
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 95, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_ngrams); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 95, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 95, __pyx_L1_error)
+ (__pyx_v_c[0]).ngram_vectors = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1)));
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":96
+ * c[0].word_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors_vocab))
+ * c[0].ngram_vectors = <REAL_t *>(np.PyArray_DATA(model.wv.vectors_ngrams))
+ * c[0].word_weights = <REAL_t *>(np.PyArray_DATA(model.word_weights)) # <<<<<<<<<<<<<<
+ *
+ * c[0].sentence_vectors = <REAL_t *>(np.PyArray_DATA(target))
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_word_weights); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 96, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 96, __pyx_L1_error)
+ (__pyx_v_c[0]).word_weights = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1)));
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":98
+ * c[0].word_weights = <REAL_t *>(np.PyArray_DATA(model.word_weights))
+ *
+ * c[0].sentence_vectors = <REAL_t *>(np.PyArray_DATA(target)) # <<<<<<<<<<<<<<
+ *
+ * cdef object populate_base_s2v_config(BaseSentenceVecsConfig *c, vocab, indexed_sentences):
+ */
+ if (!(likely(((__pyx_v_target) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_target, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 98, __pyx_L1_error)
+ (__pyx_v_c[0]).sentence_vectors = ((__pyx_t_13average_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_target)));
+
+ /* "average_inner.pyx":63
+ * c[0].sentence_vectors = <REAL_t *>(np.PyArray_DATA(target))
+ *
+ * cdef init_ft_s2v_config(FTSentenceVecsConfig *c, model, target, memory): # <<<<<<<<<<<<<<
+ * """Load Fasttext parameters into a FTSentenceVecsConfig struct.
+ *
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("average_inner.init_ft_s2v_config", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
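+
+/* Summary of the FastText configuration above: memory[0] and memory[2]
+ * provide the two REAL_t scratch buffers (mem, mem2); memory[1] is the
+ * uINT_t subword index buffer, zeroed via fill(ZERO) before its pointer is
+ * stored as subwords_idx. Word vectors are read from wv.vectors_vocab,
+ * ngram vectors from wv.vectors_ngrams, and the output rows are written to
+ * target through sentence_vectors. */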
+
+/* "average_inner.pyx":100
+ * c[0].sentence_vectors = <REAL_t *>(np.PyArray_DATA(target))
+ *
+ * cdef object populate_base_s2v_config(BaseSentenceVecsConfig *c, vocab, indexed_sentences): # <<<<<<<<<<<<<<
+ * """Prepare C structures for BaseAny2VecModel so we can go "full C" and release the Python GIL.
+ *
+ */
+
+static PyObject *__pyx_f_13average_inner_populate_base_s2v_config(struct __pyx_t_13average_inner_BaseSentenceVecsConfig *__pyx_v_c, PyObject *__pyx_v_vocab, PyObject *__pyx_v_indexed_sentences) {
+ __pyx_t_13average_inner_uINT_t __pyx_v_eff_words;
+ __pyx_t_13average_inner_uINT_t __pyx_v_eff_sents;
+ PyObject *__pyx_v_obj = NULL;
+ PyObject *__pyx_v_token = NULL;
+ PyObject *__pyx_v_word = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ Py_ssize_t __pyx_t_2;
+ PyObject *(*__pyx_t_3)(PyObject *);
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ Py_ssize_t __pyx_t_8;
+ PyObject *(*__pyx_t_9)(PyObject *);
+ PyObject *__pyx_t_10 = NULL;
+ __pyx_t_13average_inner_uINT_t __pyx_t_11;
+ __Pyx_RefNannySetupContext("populate_base_s2v_config", 0);
+
+ /* "average_inner.pyx":124
+ * """
+ *
+ * cdef uINT_t eff_words = ZERO # Effective words encountered in a sentence # <<<<<<<<<<<<<<
+ * cdef uINT_t eff_sents = ZERO # Effective sentences encountered
+ *
+ */
+ __pyx_v_eff_words = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":125
+ *
+ * cdef uINT_t eff_words = ZERO # Effective words encountered in a sentence
+ * cdef uINT_t eff_sents = ZERO # Effective sentences encountered # <<<<<<<<<<<<<<
+ *
+ * c.sentence_boundary[0] = ZERO
+ */
+ __pyx_v_eff_sents = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":127
+ * cdef uINT_t eff_sents = ZERO # Effective sentences encountered
+ *
+ * c.sentence_boundary[0] = ZERO # <<<<<<<<<<<<<<
+ *
+ * for obj in indexed_sentences:
+ */
+ (__pyx_v_c->sentence_boundary[0]) = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":129
+ * c.sentence_boundary[0] = ZERO
+ *
+ * for obj in indexed_sentences: # <<<<<<<<<<<<<<
+ * if not obj[0]:
+ * continue
+ */
+ if (likely(PyList_CheckExact(__pyx_v_indexed_sentences)) || PyTuple_CheckExact(__pyx_v_indexed_sentences)) {
+ __pyx_t_1 = __pyx_v_indexed_sentences; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0;
+ __pyx_t_3 = NULL;
+ } else {
+ __pyx_t_2 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_indexed_sentences); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 129, __pyx_L1_error)
+ }
+ for (;;) {
+ if (likely(!__pyx_t_3)) {
+ if (likely(PyList_CheckExact(__pyx_t_1))) {
+ if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_4); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 129, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 129, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ } else {
+ if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_4); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 129, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 129, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ }
+ } else {
+ __pyx_t_4 = __pyx_t_3(__pyx_t_1);
+ if (unlikely(!__pyx_t_4)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 129, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_4);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_obj, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":130
+ *
+ * for obj in indexed_sentences:
+ * if not obj[0]: # <<<<<<<<<<<<<<
+ * continue
+ * for token in obj[0]:
+ */
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_obj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 130, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 130, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = ((!__pyx_t_5) != 0);
+ if (__pyx_t_6) {
+
+ /* "average_inner.pyx":131
+ * for obj in indexed_sentences:
+ * if not obj[0]:
+ * continue # <<<<<<<<<<<<<<
+ * for token in obj[0]:
+ * word = vocab[token] if token in vocab else None # Vocab obj
+ */
+ goto __pyx_L3_continue;
+
+ /* "average_inner.pyx":130
+ *
+ * for obj in indexed_sentences:
+ * if not obj[0]: # <<<<<<<<<<<<<<
+ * continue
+ * for token in obj[0]:
+ */
+ }
+
+ /* "average_inner.pyx":132
+ * if not obj[0]:
+ * continue
+ * for token in obj[0]: # <<<<<<<<<<<<<<
+ * word = vocab[token] if token in vocab else None # Vocab obj
+ * if word is None:
+ */
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_obj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 132, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (likely(PyList_CheckExact(__pyx_t_4)) || PyTuple_CheckExact(__pyx_t_4)) {
+ __pyx_t_7 = __pyx_t_4; __Pyx_INCREF(__pyx_t_7); __pyx_t_8 = 0;
+ __pyx_t_9 = NULL;
+ } else {
+ __pyx_t_8 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 132, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_9 = Py_TYPE(__pyx_t_7)->tp_iternext; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 132, __pyx_L1_error)
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ for (;;) {
+ if (likely(!__pyx_t_9)) {
+ if (likely(PyList_CheckExact(__pyx_t_7))) {
+ if (__pyx_t_8 >= PyList_GET_SIZE(__pyx_t_7)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_8); __Pyx_INCREF(__pyx_t_4); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(0, 132, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_7, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 132, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ } else {
+ if (__pyx_t_8 >= PyTuple_GET_SIZE(__pyx_t_7)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_8); __Pyx_INCREF(__pyx_t_4); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(0, 132, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_7, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 132, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ }
+ } else {
+ __pyx_t_4 = __pyx_t_9(__pyx_t_7);
+ if (unlikely(!__pyx_t_4)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 132, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_4);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":133
+ * continue
+ * for token in obj[0]:
+ * word = vocab[token] if token in vocab else None # Vocab obj # <<<<<<<<<<<<<<
+ * if word is None:
+ * continue
+ */
+ __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vocab, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 133, __pyx_L1_error)
+ if ((__pyx_t_6 != 0)) {
+ __pyx_t_10 = __Pyx_PyObject_GetItem(__pyx_v_vocab, __pyx_v_token); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 133, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_10);
+ __pyx_t_4 = __pyx_t_10;
+ __pyx_t_10 = 0;
+ } else {
+ __Pyx_INCREF(Py_None);
+ __pyx_t_4 = Py_None;
+ }
+ __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":134
+ * for token in obj[0]:
+ * word = vocab[token] if token in vocab else None # Vocab obj
+ * if word is None: # <<<<<<<<<<<<<<
+ * continue
+ * c.word_indices[eff_words] = word.index
+ */
+ __pyx_t_6 = (__pyx_v_word == Py_None);
+ __pyx_t_5 = (__pyx_t_6 != 0);
+ if (__pyx_t_5) {
+
+ /* "average_inner.pyx":135
+ * word = vocab[token] if token in vocab else None # Vocab obj
+ * if word is None:
+ * continue # <<<<<<<<<<<<<<
+ * c.word_indices[eff_words] = word.index
+ * c.sent_adresses[eff_words] = obj[1]
+ */
+ goto __pyx_L6_continue;
+
+ /* "average_inner.pyx":134
+ * for token in obj[0]:
+ * word = vocab[token] if token in vocab else None # Vocab obj
+ * if word is None: # <<<<<<<<<<<<<<
+ * continue
+ * c.word_indices[eff_words] = word.index
+ */
+ }
+
+ /* "average_inner.pyx":136
+ * if word is None:
+ * continue
+ * c.word_indices[eff_words] = word.index # <<<<<<<<<<<<<<
+ * c.sent_adresses[eff_words] = obj[1]
+ *
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 136, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_11 = __Pyx_PyInt_As_npy_uint32(__pyx_t_4); if (unlikely((__pyx_t_11 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 136, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ (__pyx_v_c->word_indices[__pyx_v_eff_words]) = ((__pyx_t_13average_inner_uINT_t)__pyx_t_11);
+
+ /* "average_inner.pyx":137
+ * continue
+ * c.word_indices[eff_words] = word.index
+ * c.sent_adresses[eff_words] = obj[1] # <<<<<<<<<<<<<<
+ *
+ * eff_words += ONE
+ */
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_obj, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 137, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_11 = __Pyx_PyInt_As_npy_uint32(__pyx_t_4); if (unlikely((__pyx_t_11 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 137, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ (__pyx_v_c->sent_adresses[__pyx_v_eff_words]) = ((__pyx_t_13average_inner_uINT_t)__pyx_t_11);
+
+ /* "average_inner.pyx":139
+ * c.sent_adresses[eff_words] = obj[1]
+ *
+ * eff_words += ONE # <<<<<<<<<<<<<<
+ * if eff_words == MAX_WORDS:
+ * break
+ */
+ __pyx_v_eff_words = (__pyx_v_eff_words + __pyx_v_13average_inner_ONE);
+
+ /* "average_inner.pyx":140
+ *
+ * eff_words += ONE
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ * eff_sents += 1
+ */
+ __pyx_t_5 = ((__pyx_v_eff_words == 0x2710) != 0);
+ if (__pyx_t_5) {
+
+ /* "average_inner.pyx":141
+ * eff_words += ONE
+ * if eff_words == MAX_WORDS:
+ * break # <<<<<<<<<<<<<<
+ * eff_sents += 1
+ * c.sentence_boundary[eff_sents] = eff_words
+ */
+ goto __pyx_L7_break;
+
+ /* "average_inner.pyx":140
+ *
+ * eff_words += ONE
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ * eff_sents += 1
+ */
+ }
+
+ /* "average_inner.pyx":132
+ * if not obj[0]:
+ * continue
+ * for token in obj[0]: # <<<<<<<<<<<<<<
+ * word = vocab[token] if token in vocab else None # Vocab obj
+ * if word is None:
+ */
+ __pyx_L6_continue:;
+ }
+ __pyx_L7_break:;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+
+ /* "average_inner.pyx":142
+ * if eff_words == MAX_WORDS:
+ * break
+ * eff_sents += 1 # <<<<<<<<<<<<<<
+ * c.sentence_boundary[eff_sents] = eff_words
+ *
+ */
+ __pyx_v_eff_sents = (__pyx_v_eff_sents + 1);
+
+ /* "average_inner.pyx":143
+ * break
+ * eff_sents += 1
+ * c.sentence_boundary[eff_sents] = eff_words # <<<<<<<<<<<<<<
+ *
+ * if eff_words == MAX_WORDS:
+ */
+ (__pyx_v_c->sentence_boundary[__pyx_v_eff_sents]) = __pyx_v_eff_words;
+
+ /* "average_inner.pyx":145
+ * c.sentence_boundary[eff_sents] = eff_words
+ *
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ *
+ */
+ __pyx_t_5 = ((__pyx_v_eff_words == 0x2710) != 0);
+ if (__pyx_t_5) {
+
+ /* "average_inner.pyx":146
+ *
+ * if eff_words == MAX_WORDS:
+ * break # <<<<<<<<<<<<<<
+ *
+ * return eff_sents, eff_words
+ */
+ goto __pyx_L4_break;
+
+ /* "average_inner.pyx":145
+ * c.sentence_boundary[eff_sents] = eff_words
+ *
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ *
+ */
+ }
+
+ /* "average_inner.pyx":129
+ * c.sentence_boundary[0] = ZERO
+ *
+ * for obj in indexed_sentences: # <<<<<<<<<<<<<<
+ * if not obj[0]:
+ * continue
+ */
+ __pyx_L3_continue:;
+ }
+ __pyx_L4_break:;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":148
+ * break
+ *
+ * return eff_sents, eff_words # <<<<<<<<<<<<<<
+ *
+ * cdef object populate_ft_s2v_config(FTSentenceVecsConfig *c, vocab, indexed_sentences):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_npy_uint32(__pyx_v_eff_sents); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_7 = __Pyx_PyInt_From_npy_uint32(__pyx_v_eff_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 148, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 148, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_7);
+ __pyx_t_1 = 0;
+ __pyx_t_7 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "average_inner.pyx":100
+ * c[0].sentence_vectors = (np.PyArray_DATA(target))
+ *
+ * cdef object populate_base_s2v_config(BaseSentenceVecsConfig *c, vocab, indexed_sentences): # <<<<<<<<<<<<<<
+ * """Prepare C structures for BaseAny2VecModel so we can go "full C" and release the Python GIL.
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_10);
+ __Pyx_AddTraceback("average_inner.populate_base_s2v_config", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_obj);
+ __Pyx_XDECREF(__pyx_v_token);
+ __Pyx_XDECREF(__pyx_v_word);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
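+/* populate_base_s2v_config (above): walks indexed_sentences, skips entries whose
+ * token list is empty, and for every in-vocabulary token stores word.index into
+ * c.word_indices and the sentence index obj[1] into c.sent_adresses. Running
+ * counters eff_words/eff_sents are kept, c.sentence_boundary[eff_sents] marks
+ * where each sentence's words end, and collection stops once eff_words reaches
+ * MAX_WORDS (0x2710 == 10000). Returns the (eff_sents, eff_words) tuple.
+ */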
+
+/* "average_inner.pyx":150
+ * return eff_sents, eff_words
+ *
+ * cdef object populate_ft_s2v_config(FTSentenceVecsConfig *c, vocab, indexed_sentences): # <<<<<<<<<<<<<<
+ * """Prepare C structures for FastText so we can go "full C" and release the Python GIL.
+ *
+ */
+
+static PyObject *__pyx_f_13average_inner_populate_ft_s2v_config(struct __pyx_t_13average_inner_FTSentenceVecsConfig *__pyx_v_c, PyObject *__pyx_v_vocab, PyObject *__pyx_v_indexed_sentences) {
+ __pyx_t_13average_inner_uINT_t __pyx_v_eff_words;
+ __pyx_t_13average_inner_uINT_t __pyx_v_eff_sents;
+ PyObject *__pyx_v_obj = NULL;
+ PyObject *__pyx_v_token = NULL;
+ PyObject *__pyx_v_word = NULL;
+ PyObject *__pyx_v_encoded_ngrams = NULL;
+ PyObject *__pyx_v_hashes = NULL;
+ PyObject *__pyx_v_i = NULL;
+ PyObject *__pyx_v_h = NULL;
+ PyObject *__pyx_v_n = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ Py_ssize_t __pyx_t_2;
+ PyObject *(*__pyx_t_3)(PyObject *);
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ Py_ssize_t __pyx_t_8;
+ PyObject *(*__pyx_t_9)(PyObject *);
+ __pyx_t_13average_inner_uINT_t __pyx_t_10;
+ PyObject *__pyx_t_11 = NULL;
+ PyObject *__pyx_t_12 = NULL;
+ PyObject *__pyx_t_13 = NULL;
+ PyObject *__pyx_t_14 = NULL;
+ int __pyx_t_15;
+ PyObject *__pyx_t_16 = NULL;
+ Py_ssize_t __pyx_t_17;
+ PyObject *(*__pyx_t_18)(PyObject *);
+ long __pyx_t_19;
+ Py_ssize_t __pyx_t_20;
+ __Pyx_RefNannySetupContext("populate_ft_s2v_config", 0);
+
+ /* "average_inner.pyx":174
+ * """
+ *
+ * cdef uINT_t eff_words = ZERO # Effective words encountered in a sentence # <<<<<<<<<<<<<<
+ * cdef uINT_t eff_sents = ZERO # Effective sentences encountered
+ *
+ */
+ __pyx_v_eff_words = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":175
+ *
+ * cdef uINT_t eff_words = ZERO # Effective words encountered in a sentence
+ * cdef uINT_t eff_sents = ZERO # Effective sentences encountered # <<<<<<<<<<<<<<
+ *
+ * c.sentence_boundary[0] = ZERO
+ */
+ __pyx_v_eff_sents = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":177
+ * cdef uINT_t eff_sents = ZERO # Effective sentences encountered
+ *
+ * c.sentence_boundary[0] = ZERO # <<<<<<<<<<<<<<
+ *
+ * for obj in indexed_sentences:
+ */
+ (__pyx_v_c->sentence_boundary[0]) = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":179
+ * c.sentence_boundary[0] = ZERO
+ *
+ * for obj in indexed_sentences: # <<<<<<<<<<<<<<
+ * if not obj[0]:
+ * continue
+ */
+ if (likely(PyList_CheckExact(__pyx_v_indexed_sentences)) || PyTuple_CheckExact(__pyx_v_indexed_sentences)) {
+ __pyx_t_1 = __pyx_v_indexed_sentences; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0;
+ __pyx_t_3 = NULL;
+ } else {
+ __pyx_t_2 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_indexed_sentences); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 179, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 179, __pyx_L1_error)
+ }
+ for (;;) {
+ if (likely(!__pyx_t_3)) {
+ if (likely(PyList_CheckExact(__pyx_t_1))) {
+ if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_4); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 179, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 179, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ } else {
+ if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_4); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 179, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 179, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ }
+ } else {
+ __pyx_t_4 = __pyx_t_3(__pyx_t_1);
+ if (unlikely(!__pyx_t_4)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 179, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_4);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_obj, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":180
+ *
+ * for obj in indexed_sentences:
+ * if not obj[0]: # <<<<<<<<<<<<<<
+ * continue
+ * for token in obj[0]:
+ */
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_obj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 180, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 180, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = ((!__pyx_t_5) != 0);
+ if (__pyx_t_6) {
+
+ /* "average_inner.pyx":181
+ * for obj in indexed_sentences:
+ * if not obj[0]:
+ * continue # <<<<<<<<<<<<<<
+ * for token in obj[0]:
+ * c.sent_adresses[eff_words] = obj[1]
+ */
+ goto __pyx_L3_continue;
+
+ /* "average_inner.pyx":180
+ *
+ * for obj in indexed_sentences:
+ * if not obj[0]: # <<<<<<<<<<<<<<
+ * continue
+ * for token in obj[0]:
+ */
+ }
+
+ /* "average_inner.pyx":182
+ * if not obj[0]:
+ * continue
+ * for token in obj[0]: # <<<<<<<<<<<<<<
+ * c.sent_adresses[eff_words] = obj[1]
+ * if token in vocab:
+ */
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_obj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 182, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (likely(PyList_CheckExact(__pyx_t_4)) || PyTuple_CheckExact(__pyx_t_4)) {
+ __pyx_t_7 = __pyx_t_4; __Pyx_INCREF(__pyx_t_7); __pyx_t_8 = 0;
+ __pyx_t_9 = NULL;
+ } else {
+ __pyx_t_8 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 182, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_9 = Py_TYPE(__pyx_t_7)->tp_iternext; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 182, __pyx_L1_error)
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ for (;;) {
+ if (likely(!__pyx_t_9)) {
+ if (likely(PyList_CheckExact(__pyx_t_7))) {
+ if (__pyx_t_8 >= PyList_GET_SIZE(__pyx_t_7)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_8); __Pyx_INCREF(__pyx_t_4); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(0, 182, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_7, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 182, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ } else {
+ if (__pyx_t_8 >= PyTuple_GET_SIZE(__pyx_t_7)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_8); __Pyx_INCREF(__pyx_t_4); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(0, 182, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_7, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 182, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ }
+ } else {
+ __pyx_t_4 = __pyx_t_9(__pyx_t_7);
+ if (unlikely(!__pyx_t_4)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 182, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_4);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":183
+ * continue
+ * for token in obj[0]:
+ * c.sent_adresses[eff_words] = obj[1] # <<<<<<<<<<<<<<
+ * if token in vocab:
+ * # In Vocabulary
+ */
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_obj, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 183, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_10 = __Pyx_PyInt_As_npy_uint32(__pyx_t_4); if (unlikely((__pyx_t_10 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 183, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ (__pyx_v_c->sent_adresses[__pyx_v_eff_words]) = ((__pyx_t_13average_inner_uINT_t)__pyx_t_10);
+
+ /* "average_inner.pyx":184
+ * for token in obj[0]:
+ * c.sent_adresses[eff_words] = obj[1]
+ * if token in vocab: # <<<<<<<<<<<<<<
+ * # In Vocabulary
+ * word = vocab[token]
+ */
+ __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vocab, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 184, __pyx_L1_error)
+ __pyx_t_5 = (__pyx_t_6 != 0);
+ if (__pyx_t_5) {
+
+ /* "average_inner.pyx":186
+ * if token in vocab:
+ * # In Vocabulary
+ * word = vocab[token] # <<<<<<<<<<<<<<
+ * c.word_indices[eff_words] = word.index
+ * c.subwords_idx_len[eff_words] = ZERO
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetItem(__pyx_v_vocab, __pyx_v_token); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 186, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":187
+ * # In Vocabulary
+ * word = vocab[token]
+ * c.word_indices[eff_words] = word.index # <<<<<<<<<<<<<<
+ * c.subwords_idx_len[eff_words] = ZERO
+ * else:
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 187, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_10 = __Pyx_PyInt_As_npy_uint32(__pyx_t_4); if (unlikely((__pyx_t_10 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 187, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ (__pyx_v_c->word_indices[__pyx_v_eff_words]) = ((__pyx_t_13average_inner_uINT_t)__pyx_t_10);
+
+ /* "average_inner.pyx":188
+ * word = vocab[token]
+ * c.word_indices[eff_words] = word.index
+ * c.subwords_idx_len[eff_words] = ZERO # <<<<<<<<<<<<<<
+ * else:
+ * # OOV words --> write ngram indices to memory
+ */
+ (__pyx_v_c->subwords_idx_len[__pyx_v_eff_words]) = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":184
+ * for token in obj[0]:
+ * c.sent_adresses[eff_words] = obj[1]
+ * if token in vocab: # <<<<<<<<<<<<<<
+ * # In Vocabulary
+ * word = vocab[token]
+ */
+ goto __pyx_L8;
+ }
+
+ /* "average_inner.pyx":191
+ * else:
+ * # OOV words --> write ngram indices to memory
+ * c.word_indices[eff_words] = ZERO # <<<<<<<<<<<<<<
+ *
+ * encoded_ngrams = compute_ngrams_bytes(token, c.min_n, c.max_n)
+ */
+ /*else*/ {
+ (__pyx_v_c->word_indices[__pyx_v_eff_words]) = __pyx_v_13average_inner_ZERO;
+
+ /* "average_inner.pyx":193
+ * c.word_indices[eff_words] = ZERO
+ *
+ * encoded_ngrams = compute_ngrams_bytes(token, c.min_n, c.max_n) # <<<<<<<<<<<<<<
+ * hashes = [ft_hash_bytes(n) % c.bucket for n in encoded_ngrams]
+ *
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_11, __pyx_n_s_compute_ngrams_bytes); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 193, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_11);
+ __pyx_t_12 = __Pyx_PyInt_From_int(__pyx_v_c->min_n); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 193, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_12);
+ __pyx_t_13 = __Pyx_PyInt_From_int(__pyx_v_c->max_n); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 193, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_13);
+ __pyx_t_14 = NULL;
+ __pyx_t_15 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_11))) {
+ __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_11);
+ if (likely(__pyx_t_14)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_11);
+ __Pyx_INCREF(__pyx_t_14);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_11, function);
+ __pyx_t_15 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_11)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_v_token, __pyx_t_12, __pyx_t_13};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_11, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 193, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0;
+ __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_11)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_v_token, __pyx_t_12, __pyx_t_13};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_11, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 193, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0;
+ __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_16 = PyTuple_New(3+__pyx_t_15); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 193, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_16);
+ if (__pyx_t_14) {
+ __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_14); __pyx_t_14 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_token);
+ __Pyx_GIVEREF(__pyx_v_token);
+ PyTuple_SET_ITEM(__pyx_t_16, 0+__pyx_t_15, __pyx_v_token);
+ __Pyx_GIVEREF(__pyx_t_12);
+ PyTuple_SET_ITEM(__pyx_t_16, 1+__pyx_t_15, __pyx_t_12);
+ __Pyx_GIVEREF(__pyx_t_13);
+ PyTuple_SET_ITEM(__pyx_t_16, 2+__pyx_t_15, __pyx_t_13);
+ __pyx_t_12 = 0;
+ __pyx_t_13 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_11, __pyx_t_16, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 193, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_encoded_ngrams, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":194
+ *
+ * encoded_ngrams = compute_ngrams_bytes(token, c.min_n, c.max_n)
+ * hashes = [ft_hash_bytes(n) % c.bucket for n in encoded_ngrams] # <<<<<<<<<<<<<<
+ *
+ * c.subwords_idx_len[eff_words] = min(len(encoded_ngrams), MAX_NGRAMS)
+ */
+ __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (likely(PyList_CheckExact(__pyx_v_encoded_ngrams)) || PyTuple_CheckExact(__pyx_v_encoded_ngrams)) {
+ __pyx_t_11 = __pyx_v_encoded_ngrams; __Pyx_INCREF(__pyx_t_11); __pyx_t_17 = 0;
+ __pyx_t_18 = NULL;
+ } else {
+ __pyx_t_17 = -1; __pyx_t_11 = PyObject_GetIter(__pyx_v_encoded_ngrams); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_11);
+ __pyx_t_18 = Py_TYPE(__pyx_t_11)->tp_iternext; if (unlikely(!__pyx_t_18)) __PYX_ERR(0, 194, __pyx_L1_error)
+ }
+ for (;;) {
+ if (likely(!__pyx_t_18)) {
+ if (likely(PyList_CheckExact(__pyx_t_11))) {
+ if (__pyx_t_17 >= PyList_GET_SIZE(__pyx_t_11)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_16 = PyList_GET_ITEM(__pyx_t_11, __pyx_t_17); __Pyx_INCREF(__pyx_t_16); __pyx_t_17++; if (unlikely(0 < 0)) __PYX_ERR(0, 194, __pyx_L1_error)
+ #else
+ __pyx_t_16 = PySequence_ITEM(__pyx_t_11, __pyx_t_17); __pyx_t_17++; if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_16);
+ #endif
+ } else {
+ if (__pyx_t_17 >= PyTuple_GET_SIZE(__pyx_t_11)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_16 = PyTuple_GET_ITEM(__pyx_t_11, __pyx_t_17); __Pyx_INCREF(__pyx_t_16); __pyx_t_17++; if (unlikely(0 < 0)) __PYX_ERR(0, 194, __pyx_L1_error)
+ #else
+ __pyx_t_16 = PySequence_ITEM(__pyx_t_11, __pyx_t_17); __pyx_t_17++; if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_16);
+ #endif
+ }
+ } else {
+ __pyx_t_16 = __pyx_t_18(__pyx_t_11);
+ if (unlikely(!__pyx_t_16)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 194, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_16);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_n, __pyx_t_16);
+ __pyx_t_16 = 0;
+ __Pyx_GetModuleGlobalName(__pyx_t_13, __pyx_n_s_ft_hash_bytes); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_13);
+ __pyx_t_12 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_13))) {
+ __pyx_t_12 = PyMethod_GET_SELF(__pyx_t_13);
+ if (likely(__pyx_t_12)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_13);
+ __Pyx_INCREF(__pyx_t_12);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_13, function);
+ }
+ }
+ __pyx_t_16 = (__pyx_t_12) ? __Pyx_PyObject_Call2Args(__pyx_t_13, __pyx_t_12, __pyx_v_n) : __Pyx_PyObject_CallOneArg(__pyx_t_13, __pyx_v_n);
+ __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0;
+ if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_16);
+ __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
+ __pyx_t_13 = __Pyx_PyInt_From_int(__pyx_v_c->bucket); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_13);
+ __pyx_t_12 = PyNumber_Remainder(__pyx_t_16, __pyx_t_13); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_12);
+ __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0;
+ __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
+ if (unlikely(__Pyx_ListComp_Append(__pyx_t_4, (PyObject*)__pyx_t_12))) __PYX_ERR(0, 194, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_hashes, ((PyObject*)__pyx_t_4));
+ __pyx_t_4 = 0;
+
+ /* "average_inner.pyx":196
+ * hashes = [ft_hash_bytes(n) % c.bucket for n in encoded_ngrams]
+ *
+ * c.subwords_idx_len[eff_words] = min(len(encoded_ngrams), MAX_NGRAMS) # <<<<<<<<<<<<<<
+ * for i, h in enumerate(hashes[:MAX_NGRAMS]):
+ * c.subwords_idx[(eff_words * MAX_NGRAMS) + i] = h
+ */
+ __pyx_t_19 = 40;
+ __pyx_t_17 = PyObject_Length(__pyx_v_encoded_ngrams); if (unlikely(__pyx_t_17 == ((Py_ssize_t)-1))) __PYX_ERR(0, 196, __pyx_L1_error)
+ if (((__pyx_t_19 < __pyx_t_17) != 0)) {
+ __pyx_t_20 = __pyx_t_19;
+ } else {
+ __pyx_t_20 = __pyx_t_17;
+ }
+ (__pyx_v_c->subwords_idx_len[__pyx_v_eff_words]) = ((__pyx_t_13average_inner_uINT_t)__pyx_t_20);
+
+ /* "average_inner.pyx":197
+ *
+ * c.subwords_idx_len[eff_words] = min(len(encoded_ngrams), MAX_NGRAMS)
+ * for i, h in enumerate(hashes[:MAX_NGRAMS]): # <<<<<<<<<<<<<<
+ * c.subwords_idx[(eff_words * MAX_NGRAMS) + i] = h
+ *
+ */
+ __Pyx_INCREF(__pyx_int_0);
+ __pyx_t_4 = __pyx_int_0;
+ __pyx_t_11 = __Pyx_PyList_GetSlice(__pyx_v_hashes, 0, 40); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 197, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_11);
+ __pyx_t_12 = __pyx_t_11; __Pyx_INCREF(__pyx_t_12); __pyx_t_20 = 0;
+ __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;
+ for (;;) {
+ if (__pyx_t_20 >= PyList_GET_SIZE(__pyx_t_12)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_11 = PyList_GET_ITEM(__pyx_t_12, __pyx_t_20); __Pyx_INCREF(__pyx_t_11); __pyx_t_20++; if (unlikely(0 < 0)) __PYX_ERR(0, 197, __pyx_L1_error)
+ #else
+ __pyx_t_11 = PySequence_ITEM(__pyx_t_12, __pyx_t_20); __pyx_t_20++; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 197, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_11);
+ #endif
+ __Pyx_XDECREF_SET(__pyx_v_h, __pyx_t_11);
+ __pyx_t_11 = 0;
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_4);
+ __pyx_t_11 = __Pyx_PyInt_AddObjC(__pyx_t_4, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 197, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_11);
+ __Pyx_DECREF(__pyx_t_4);
+ __pyx_t_4 = __pyx_t_11;
+ __pyx_t_11 = 0;
+
+ /* "average_inner.pyx":198
+ * c.subwords_idx_len[eff_words] = min(len(encoded_ngrams), MAX_NGRAMS)
+ * for i, h in enumerate(hashes[:MAX_NGRAMS]):
+ * c.subwords_idx[(eff_words * MAX_NGRAMS) + i] = h # <<<<<<<<<<<<<<
+ *
+ * eff_words += ONE
+ */
+ __pyx_t_10 = __Pyx_PyInt_As_npy_uint32(__pyx_v_h); if (unlikely((__pyx_t_10 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 198, __pyx_L1_error)
+ __pyx_t_11 = __Pyx_PyInt_From_long((__pyx_v_eff_words * 40)); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 198, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_11);
+ __pyx_t_13 = PyNumber_Add(__pyx_t_11, __pyx_v_i); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 198, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_13);
+ __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;
+ __pyx_t_17 = __Pyx_PyIndex_AsSsize_t(__pyx_t_13); if (unlikely((__pyx_t_17 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 198, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
+ (__pyx_v_c->subwords_idx[__pyx_t_17]) = ((__pyx_t_13average_inner_uINT_t)__pyx_t_10);
+
+ /* "average_inner.pyx":197
+ *
+ * c.subwords_idx_len[eff_words] = min(len(encoded_ngrams), MAX_NGRAMS)
+ * for i, h in enumerate(hashes[:MAX_NGRAMS]): # <<<<<<<<<<<<<<
+ * c.subwords_idx[(eff_words * MAX_NGRAMS) + i] = h
+ *
+ */
+ }
+ __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __pyx_L8:;
+
+ /* "average_inner.pyx":200
+ * c.subwords_idx[(eff_words * MAX_NGRAMS) + i] = h
+ *
+ * eff_words += ONE # <<<<<<<<<<<<<<
+ *
+ * if eff_words == MAX_WORDS:
+ */
+ __pyx_v_eff_words = (__pyx_v_eff_words + __pyx_v_13average_inner_ONE);
+
+ /* "average_inner.pyx":202
+ * eff_words += ONE
+ *
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ *
+ */
+ __pyx_t_5 = ((__pyx_v_eff_words == 0x2710) != 0);
+ if (__pyx_t_5) {
+
+ /* "average_inner.pyx":203
+ *
+ * if eff_words == MAX_WORDS:
+ * break # <<<<<<<<<<<<<<
+ *
+ * eff_sents += 1
+ */
+ goto __pyx_L7_break;
+
+ /* "average_inner.pyx":202
+ * eff_words += ONE
+ *
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ *
+ */
+ }
+
+ /* "average_inner.pyx":182
+ * if not obj[0]:
+ * continue
+ * for token in obj[0]: # <<<<<<<<<<<<<<
+ * c.sent_adresses[eff_words] = obj[1]
+ * if token in vocab:
+ */
+ }
+ __pyx_L7_break:;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+
+ /* "average_inner.pyx":205
+ * break
+ *
+ * eff_sents += 1 # <<<<<<<<<<<<<<
+ * c.sentence_boundary[eff_sents] = eff_words
+ *
+ */
+ __pyx_v_eff_sents = (__pyx_v_eff_sents + 1);
+
+ /* "average_inner.pyx":206
+ *
+ * eff_sents += 1
+ * c.sentence_boundary[eff_sents] = eff_words # <<<<<<<<<<<<<<
+ *
+ * if eff_words == MAX_WORDS:
+ */
+ (__pyx_v_c->sentence_boundary[__pyx_v_eff_sents]) = __pyx_v_eff_words;
+
+ /* "average_inner.pyx":208
+ * c.sentence_boundary[eff_sents] = eff_words
+ *
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ *
+ */
+ __pyx_t_5 = ((__pyx_v_eff_words == 0x2710) != 0);
+ if (__pyx_t_5) {
+
+ /* "average_inner.pyx":209
+ *
+ * if eff_words == MAX_WORDS:
+ * break # <<<<<<<<<<<<<<
+ *
+ * return eff_sents, eff_words
+ */
+ goto __pyx_L4_break;
+
+ /* "average_inner.pyx":208
+ * c.sentence_boundary[eff_sents] = eff_words
+ *
+ * if eff_words == MAX_WORDS: # <<<<<<<<<<<<<<
+ * break
+ *
+ */
+ }
+
+ /* "average_inner.pyx":179
+ * c.sentence_boundary[0] = ZERO
+ *
+ * for obj in indexed_sentences: # <<<<<<<<<<<<<<
+ * if not obj[0]:
+ * continue
+ */
+ __pyx_L3_continue:;
+ }
+ __pyx_L4_break:;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":211
+ * break
+ *
+ * return eff_sents, eff_words # <<<<<<<<<<<<<<
+ *
+ * cdef void compute_base_sentence_averages(BaseSentenceVecsConfig *c, uINT_t num_sentences) nogil:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_npy_uint32(__pyx_v_eff_sents); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 211, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_7 = __Pyx_PyInt_From_npy_uint32(__pyx_v_eff_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 211, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 211, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_7);
+ __pyx_t_1 = 0;
+ __pyx_t_7 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "average_inner.pyx":150
+ * return eff_sents, eff_words
+ *
+ * cdef object populate_ft_s2v_config(FTSentenceVecsConfig *c, vocab, indexed_sentences): # <<<<<<<<<<<<<<
+ * """Prepare C structures for FastText so we can go "full C" and release the Python GIL.
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_11);
+ __Pyx_XDECREF(__pyx_t_12);
+ __Pyx_XDECREF(__pyx_t_13);
+ __Pyx_XDECREF(__pyx_t_14);
+ __Pyx_XDECREF(__pyx_t_16);
+ __Pyx_AddTraceback("average_inner.populate_ft_s2v_config", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_obj);
+ __Pyx_XDECREF(__pyx_v_token);
+ __Pyx_XDECREF(__pyx_v_word);
+ __Pyx_XDECREF(__pyx_v_encoded_ngrams);
+ __Pyx_XDECREF(__pyx_v_hashes);
+ __Pyx_XDECREF(__pyx_v_i);
+ __Pyx_XDECREF(__pyx_v_h);
+ __Pyx_XDECREF(__pyx_v_n);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
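+/* populate_ft_s2v_config (above): same bookkeeping as the base variant, except
+ * tokens missing from vocab are handled via FastText subwords: compute_ngrams_bytes(
+ * token, c.min_n, c.max_n) yields the character ngrams, each is hashed with
+ * ft_hash_bytes(n) % c.bucket, and up to MAX_NGRAMS (40) hashes per token are
+ * written into c.subwords_idx, with the count stored in c.subwords_idx_len.
+ * In-vocabulary tokens get subwords_idx_len == 0, which the averaging kernel
+ * below uses as the "take the word vector directly" marker.
+ */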
+
+/* "average_inner.pyx":213
+ * return eff_sents, eff_words
+ *
+ * cdef void compute_base_sentence_averages(BaseSentenceVecsConfig *c, uINT_t num_sentences) nogil: # <<<<<<<<<<<<<<
+ * """Perform optimized sentence-level averaging for BaseAny2Vec model.
+ *
+ */
+
+static void __pyx_f_13average_inner_compute_base_sentence_averages(struct __pyx_t_13average_inner_BaseSentenceVecsConfig *__pyx_v_c, __pyx_t_13average_inner_uINT_t __pyx_v_num_sentences) {
+ int __pyx_v_size;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_idx;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_start;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_end;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_row;
+ __pyx_t_13average_inner_uINT_t __pyx_v_i;
+ __pyx_t_13average_inner_uINT_t __pyx_v_word_idx;
+ __pyx_t_13average_inner_uINT_t __pyx_v_word_row;
+ __pyx_t_13average_inner_REAL_t __pyx_v_sent_len;
+ __pyx_t_13average_inner_REAL_t __pyx_v_inv_count;
+ int __pyx_t_1;
+ __pyx_t_13average_inner_uINT_t __pyx_t_2;
+ __pyx_t_13average_inner_uINT_t __pyx_t_3;
+ __pyx_t_13average_inner_uINT_t __pyx_t_4;
+ __pyx_t_13average_inner_uINT_t __pyx_t_5;
+ __pyx_t_13average_inner_uINT_t __pyx_t_6;
+ __pyx_t_13average_inner_uINT_t __pyx_t_7;
+ int __pyx_t_8;
+
+ /* "average_inner.pyx":229
+ * """
+ * cdef:
+ * int size = c.size # <<<<<<<<<<<<<<
+ *
+ * uINT_t sent_idx, sent_start, sent_end, sent_row
+ */
+ __pyx_t_1 = __pyx_v_c->size;
+ __pyx_v_size = __pyx_t_1;
+
+ /* "average_inner.pyx":237
+ * REAL_t sent_len, inv_count
+ *
+ * for sent_idx in range(num_sentences): # <<<<<<<<<<<<<<
+ * memset(c.mem, 0, size * cython.sizeof(REAL_t))
+ *
+ */
+ __pyx_t_2 = __pyx_v_num_sentences;
+ __pyx_t_3 = __pyx_t_2;
+ for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) {
+ __pyx_v_sent_idx = __pyx_t_4;
+
+ /* "average_inner.pyx":238
+ *
+ * for sent_idx in range(num_sentences):
+ * memset(c.mem, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<<
+ *
+ * sent_start = c.sentence_boundary[sent_idx]
+ */
+ (void)(memset(__pyx_v_c->mem, 0, (__pyx_v_size * (sizeof(__pyx_t_13average_inner_REAL_t)))));
+
+ /* "average_inner.pyx":240
+ * memset(c.mem, 0, size * cython.sizeof(REAL_t))
+ *
+ * sent_start = c.sentence_boundary[sent_idx] # <<<<<<<<<<<<<<
+ * sent_end = c.sentence_boundary[sent_idx + 1]
+ * sent_len = ZEROF
+ */
+ __pyx_v_sent_start = (__pyx_v_c->sentence_boundary[__pyx_v_sent_idx]);
+
+ /* "average_inner.pyx":241
+ *
+ * sent_start = c.sentence_boundary[sent_idx]
+ * sent_end = c.sentence_boundary[sent_idx + 1] # <<<<<<<<<<<<<<
+ * sent_len = ZEROF
+ *
+ */
+ __pyx_v_sent_end = (__pyx_v_c->sentence_boundary[(__pyx_v_sent_idx + 1)]);
+
+ /* "average_inner.pyx":242
+ * sent_start = c.sentence_boundary[sent_idx]
+ * sent_end = c.sentence_boundary[sent_idx + 1]
+ * sent_len = ZEROF # <<<<<<<<<<<<<<
+ *
+ * for i in range(sent_start, sent_end):
+ */
+ __pyx_v_sent_len = __pyx_v_13average_inner_ZEROF;
+
+ /* "average_inner.pyx":244
+ * sent_len = ZEROF
+ *
+ * for i in range(sent_start, sent_end): # <<<<<<<<<<<<<<
+ * sent_len += ONEF
+ * sent_row = c.sent_adresses[i] * size
+ */
+ __pyx_t_5 = __pyx_v_sent_end;
+ __pyx_t_6 = __pyx_t_5;
+ for (__pyx_t_7 = __pyx_v_sent_start; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) {
+ __pyx_v_i = __pyx_t_7;
+
+ /* "average_inner.pyx":245
+ *
+ * for i in range(sent_start, sent_end):
+ * sent_len += ONEF # <<<<<<<<<<<<<<
+ * sent_row = c.sent_adresses[i] * size
+ * word_row = c.word_indices[i] * size
+ */
+ __pyx_v_sent_len = (__pyx_v_sent_len + __pyx_v_13average_inner_ONEF);
+
+ /* "average_inner.pyx":246
+ * for i in range(sent_start, sent_end):
+ * sent_len += ONEF
+ * sent_row = c.sent_adresses[i] * size # <<<<<<<<<<<<<<
+ * word_row = c.word_indices[i] * size
+ * word_idx = c.word_indices[i]
+ */
+ __pyx_v_sent_row = ((__pyx_v_c->sent_adresses[__pyx_v_i]) * __pyx_v_size);
+
+ /* "average_inner.pyx":247
+ * sent_len += ONEF
+ * sent_row = c.sent_adresses[i] * size
+ * word_row = c.word_indices[i] * size # <<<<<<<<<<<<<<
+ * word_idx = c.word_indices[i]
+ *
+ */
+ __pyx_v_word_row = ((__pyx_v_c->word_indices[__pyx_v_i]) * __pyx_v_size);
+
+ /* "average_inner.pyx":248
+ * sent_row = c.sent_adresses[i] * size
+ * word_row = c.word_indices[i] * size
+ * word_idx = c.word_indices[i] # <<<<<<<<<<<<<<
+ *
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE)
+ */
+ __pyx_v_word_idx = (__pyx_v_c->word_indices[__pyx_v_i]);
+
+ /* "average_inner.pyx":250
+ * word_idx = c.word_indices[i]
+ *
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE) # <<<<<<<<<<<<<<
+ *
+ * if sent_len > ZEROF:
+ */
+ __pyx_v_13average_inner_saxpy((&__pyx_v_size), (&(__pyx_v_c->word_weights[__pyx_v_word_idx])), (&(__pyx_v_c->word_vectors[__pyx_v_word_row])), (&__pyx_v_13average_inner_ONE), __pyx_v_c->mem, (&__pyx_v_13average_inner_ONE));
+ }
+
+ /* "average_inner.pyx":252
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE)
+ *
+ * if sent_len > ZEROF: # <<<<<<<<<<<<<<
+ * inv_count = ONEF / sent_len
+ * # If we perform the a*x on memory, the computation is compatible with many-to-one mappings
+ */
+ __pyx_t_8 = ((__pyx_v_sent_len > __pyx_v_13average_inner_ZEROF) != 0);
+ if (__pyx_t_8) {
+
+ /* "average_inner.pyx":253
+ *
+ * if sent_len > ZEROF:
+ * inv_count = ONEF / sent_len # <<<<<<<<<<<<<<
+ * # If we perform the a*x on memory, the computation is compatible with many-to-one mappings
+ * # because it doesn't rescale the overall result
+ */
+ __pyx_v_inv_count = (__pyx_v_13average_inner_ONEF / __pyx_v_sent_len);
+
+ /* "average_inner.pyx":256
+ * # If we perform the a*x on memory, the computation is compatible with many-to-one mappings
+ * # because it doesn't rescale the overall result
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE) # <<<<<<<<<<<<<<
+ *
+ * cdef void compute_ft_sentence_averages(FTSentenceVecsConfig *c, uINT_t num_sentences) nogil:
+ */
+ __pyx_v_13average_inner_saxpy((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_c->mem, (&__pyx_v_13average_inner_ONE), (&(__pyx_v_c->sentence_vectors[__pyx_v_sent_row])), (&__pyx_v_13average_inner_ONE));
+
+ /* "average_inner.pyx":252
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE)
+ *
+ * if sent_len > ZEROF: # <<<<<<<<<<<<<<
+ * inv_count = ONEF / sent_len
+ * # If we perform the a*x on memory, the computation is compatible with many-to-one mappings
+ */
+ }
+ }
+
+ /* "average_inner.pyx":213
+ * return eff_sents, eff_words
+ *
+ * cdef void compute_base_sentence_averages(BaseSentenceVecsConfig *c, uINT_t num_sentences) nogil: # <<<<<<<<<<<<<<
+ * """Perform optimized sentence-level averaging for BaseAny2Vec model.
+ *
+ */
+
+ /* function exit code */
+}
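+/* compute_base_sentence_averages (above), in plain terms: for every sentence it
+ * zeroes the scratch buffer c.mem, accumulates each word vector scaled by its
+ * word weight via BLAS saxpy (y := a*x + y), and finally adds mem / sent_len onto
+ * the target row c.sentence_vectors[sent_row], where sent_row comes from
+ * c.sent_adresses. Illustrative per-sentence pseudocode (NumPy-style, rows shown
+ * instead of flat offsets):
+ *
+ *     mem[:] = 0
+ *     for i in range(sent_start, sent_end):
+ *         mem += word_weights[word_indices[i]] * word_vectors[word_indices[i]]
+ *     if sent_len > 0:
+ *         sentence_vectors[sent_row] += mem / sent_len
+ */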
+
+/* "average_inner.pyx":258
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE)
+ *
+ * cdef void compute_ft_sentence_averages(FTSentenceVecsConfig *c, uINT_t num_sentences) nogil: # <<<<<<<<<<<<<<
+ * """Perform optimized sentence-level averaging for FastText model.
+ *
+ */
+
+static void __pyx_f_13average_inner_compute_ft_sentence_averages(struct __pyx_t_13average_inner_FTSentenceVecsConfig *__pyx_v_c, __pyx_t_13average_inner_uINT_t __pyx_v_num_sentences) {
+ int __pyx_v_size;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_idx;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_start;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_end;
+ __pyx_t_13average_inner_uINT_t __pyx_v_sent_row;
+ __pyx_t_13average_inner_uINT_t __pyx_v_ngram_row;
+ __pyx_t_13average_inner_uINT_t __pyx_v_ngrams;
+ __pyx_t_13average_inner_uINT_t __pyx_v_i;
+ __pyx_t_13average_inner_uINT_t __pyx_v_j;
+ __pyx_t_13average_inner_uINT_t __pyx_v_word_idx;
+ __pyx_t_13average_inner_uINT_t __pyx_v_word_row;
+ __pyx_t_13average_inner_REAL_t __pyx_v_sent_len;
+ __pyx_t_13average_inner_REAL_t __pyx_v_inv_count;
+ __pyx_t_13average_inner_REAL_t __pyx_v_inv_ngram;
+ CYTHON_UNUSED __pyx_t_13average_inner_REAL_t __pyx_v_oov_weight;
+ int __pyx_t_1;
+ __pyx_t_13average_inner_REAL_t __pyx_t_2;
+ __pyx_t_13average_inner_uINT_t __pyx_t_3;
+ __pyx_t_13average_inner_uINT_t __pyx_t_4;
+ __pyx_t_13average_inner_uINT_t __pyx_t_5;
+ __pyx_t_13average_inner_uINT_t __pyx_t_6;
+ __pyx_t_13average_inner_uINT_t __pyx_t_7;
+ __pyx_t_13average_inner_uINT_t __pyx_t_8;
+ int __pyx_t_9;
+ __pyx_t_13average_inner_uINT_t __pyx_t_10;
+ __pyx_t_13average_inner_uINT_t __pyx_t_11;
+ __pyx_t_13average_inner_uINT_t __pyx_t_12;
+
+ /* "average_inner.pyx":274
+ * """
+ * cdef:
+ * int size = c.size # <<<<<<<<<<<<<<
+ *
+ * uINT_t sent_idx, sent_start, sent_end, sent_row
+ */
+ __pyx_t_1 = __pyx_v_c->size;
+ __pyx_v_size = __pyx_t_1;
+
+ /* "average_inner.pyx":284
+ * REAL_t sent_len
+ * REAL_t inv_count, inv_ngram
+ * REAL_t oov_weight = c.oov_weight # <<<<<<<<<<<<<<
+ *
+ *
+ */
+ __pyx_t_2 = __pyx_v_c->oov_weight;
+ __pyx_v_oov_weight = __pyx_t_2;
+
+ /* "average_inner.pyx":287
+ *
+ *
+ * for sent_idx in range(num_sentences): # <<<<<<<<<<<<<<
+ * memset(c.mem, 0, size * cython.sizeof(REAL_t))
+ * sent_start = c.sentence_boundary[sent_idx]
+ */
+ __pyx_t_3 = __pyx_v_num_sentences;
+ __pyx_t_4 = __pyx_t_3;
+ for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) {
+ __pyx_v_sent_idx = __pyx_t_5;
+
+ /* "average_inner.pyx":288
+ *
+ * for sent_idx in range(num_sentences):
+ * memset(c.mem, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<<
+ * sent_start = c.sentence_boundary[sent_idx]
+ * sent_end = c.sentence_boundary[sent_idx + 1]
+ */
+ (void)(memset(__pyx_v_c->mem, 0, (__pyx_v_size * (sizeof(__pyx_t_13average_inner_REAL_t)))));
+
+ /* "average_inner.pyx":289
+ * for sent_idx in range(num_sentences):
+ * memset(c.mem, 0, size * cython.sizeof(REAL_t))
+ * sent_start = c.sentence_boundary[sent_idx] # <<<<<<<<<<<<<<
+ * sent_end = c.sentence_boundary[sent_idx + 1]
+ * sent_len = ZEROF
+ */
+ __pyx_v_sent_start = (__pyx_v_c->sentence_boundary[__pyx_v_sent_idx]);
+
+ /* "average_inner.pyx":290
+ * memset(c.mem, 0, size * cython.sizeof(REAL_t))
+ * sent_start = c.sentence_boundary[sent_idx]
+ * sent_end = c.sentence_boundary[sent_idx + 1] # <<<<<<<<<<<<<<
+ * sent_len = ZEROF
+ *
+ */
+ __pyx_v_sent_end = (__pyx_v_c->sentence_boundary[(__pyx_v_sent_idx + 1)]);
+
+ /* "average_inner.pyx":291
+ * sent_start = c.sentence_boundary[sent_idx]
+ * sent_end = c.sentence_boundary[sent_idx + 1]
+ * sent_len = ZEROF # <<<<<<<<<<<<<<
+ *
+ * for i in range(sent_start, sent_end):
+ */
+ __pyx_v_sent_len = __pyx_v_13average_inner_ZEROF;
+
+ /* "average_inner.pyx":293
+ * sent_len = ZEROF
+ *
+ * for i in range(sent_start, sent_end): # <<<<<<<<<<<<<<
+ * sent_len += ONEF
+ * sent_row = c.sent_adresses[i] * size
+ */
+ __pyx_t_6 = __pyx_v_sent_end;
+ __pyx_t_7 = __pyx_t_6;
+ for (__pyx_t_8 = __pyx_v_sent_start; __pyx_t_8 < __pyx_t_7; __pyx_t_8+=1) {
+ __pyx_v_i = __pyx_t_8;
+
+ /* "average_inner.pyx":294
+ *
+ * for i in range(sent_start, sent_end):
+ * sent_len += ONEF # <<<<<<<<<<<<<<
+ * sent_row = c.sent_adresses[i] * size
+ *
+ */
+ __pyx_v_sent_len = (__pyx_v_sent_len + __pyx_v_13average_inner_ONEF);
+
+ /* "average_inner.pyx":295
+ * for i in range(sent_start, sent_end):
+ * sent_len += ONEF
+ * sent_row = c.sent_adresses[i] * size # <<<<<<<<<<<<<<
+ *
+ * word_idx = c.word_indices[i]
+ */
+ __pyx_v_sent_row = ((__pyx_v_c->sent_adresses[__pyx_v_i]) * __pyx_v_size);
+
+ /* "average_inner.pyx":297
+ * sent_row = c.sent_adresses[i] * size
+ *
+ * word_idx = c.word_indices[i] # <<<<<<<<<<<<<<
+ * ngrams = c.subwords_idx_len[i]
+ *
+ */
+ __pyx_v_word_idx = (__pyx_v_c->word_indices[__pyx_v_i]);
+
+ /* "average_inner.pyx":298
+ *
+ * word_idx = c.word_indices[i]
+ * ngrams = c.subwords_idx_len[i] # <<<<<<<<<<<<<<
+ *
+ * if ngrams == 0:
+ */
+ __pyx_v_ngrams = (__pyx_v_c->subwords_idx_len[__pyx_v_i]);
+
+ /* "average_inner.pyx":300
+ * ngrams = c.subwords_idx_len[i]
+ *
+ * if ngrams == 0: # <<<<<<<<<<<<<<
+ * word_row = c.word_indices[i] * size
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE)
+ */
+ __pyx_t_9 = ((__pyx_v_ngrams == 0) != 0);
+ if (__pyx_t_9) {
+
+ /* "average_inner.pyx":301
+ *
+ * if ngrams == 0:
+ * word_row = c.word_indices[i] * size # <<<<<<<<<<<<<<
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE)
+ * else:
+ */
+ __pyx_v_word_row = ((__pyx_v_c->word_indices[__pyx_v_i]) * __pyx_v_size);
+
+ /* "average_inner.pyx":302
+ * if ngrams == 0:
+ * word_row = c.word_indices[i] * size
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE) # <<<<<<<<<<<<<<
+ * else:
+ * inv_ngram = (ONEF / ngrams) * c.oov_weight
+ */
+ __pyx_v_13average_inner_saxpy((&__pyx_v_size), (&(__pyx_v_c->word_weights[__pyx_v_word_idx])), (&(__pyx_v_c->word_vectors[__pyx_v_word_row])), (&__pyx_v_13average_inner_ONE), __pyx_v_c->mem, (&__pyx_v_13average_inner_ONE));
+
+ /* "average_inner.pyx":300
+ * ngrams = c.subwords_idx_len[i]
+ *
+ * if ngrams == 0: # <<<<<<<<<<<<<<
+ * word_row = c.word_indices[i] * size
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE)
+ */
+ goto __pyx_L7;
+ }
+
+ /* "average_inner.pyx":304
+ * saxpy(&size, &c.word_weights[word_idx], &c.word_vectors[word_row], &ONE, c.mem, &ONE)
+ * else:
+ * inv_ngram = (ONEF / ngrams) * c.oov_weight # <<<<<<<<<<<<<<
+ * for j in range(ngrams):
+ * ngram_row = c.subwords_idx[(i * MAX_NGRAMS)+j] * size
+ */
+ /*else*/ {
+ __pyx_v_inv_ngram = ((__pyx_v_13average_inner_ONEF / ((__pyx_t_13average_inner_REAL_t)__pyx_v_ngrams)) * __pyx_v_c->oov_weight);
+
+ /* "average_inner.pyx":305
+ * else:
+ * inv_ngram = (ONEF / ngrams) * c.oov_weight
+ * for j in range(ngrams): # <<<<<<<<<<<<<<
+ * ngram_row = c.subwords_idx[(i * MAX_NGRAMS)+j] * size
+ * saxpy(&size, &inv_ngram, &c.ngram_vectors[ngram_row], &ONE, c.mem, &ONE)
+ */
+ __pyx_t_10 = __pyx_v_ngrams;
+ __pyx_t_11 = __pyx_t_10;
+ for (__pyx_t_12 = 0; __pyx_t_12 < __pyx_t_11; __pyx_t_12+=1) {
+ __pyx_v_j = __pyx_t_12;
+
+ /* "average_inner.pyx":306
+ * inv_ngram = (ONEF / ngrams) * c.oov_weight
+ * for j in range(ngrams):
+ * ngram_row = c.subwords_idx[(i * MAX_NGRAMS)+j] * size # <<<<<<<<<<<<<<
+ * saxpy(&size, &inv_ngram, &c.ngram_vectors[ngram_row], &ONE, c.mem, &ONE)
+ *
+ */
+ __pyx_v_ngram_row = ((__pyx_v_c->subwords_idx[((__pyx_v_i * 40) + __pyx_v_j)]) * __pyx_v_size);
+
+ /* "average_inner.pyx":307
+ * for j in range(ngrams):
+ * ngram_row = c.subwords_idx[(i * MAX_NGRAMS)+j] * size
+ * saxpy(&size, &inv_ngram, &c.ngram_vectors[ngram_row], &ONE, c.mem, &ONE) # <<<<<<<<<<<<<<
+ *
+ * if sent_len > ZEROF:
+ */
+ __pyx_v_13average_inner_saxpy((&__pyx_v_size), (&__pyx_v_inv_ngram), (&(__pyx_v_c->ngram_vectors[__pyx_v_ngram_row])), (&__pyx_v_13average_inner_ONE), __pyx_v_c->mem, (&__pyx_v_13average_inner_ONE));
+ }
+ }
+ __pyx_L7:;
+ }
+
+ /* "average_inner.pyx":309
+ * saxpy(&size, &inv_ngram, &c.ngram_vectors[ngram_row], &ONE, c.mem, &ONE)
+ *
+ * if sent_len > ZEROF: # <<<<<<<<<<<<<<
+ * inv_count = ONEF / sent_len
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE)
+ */
+ __pyx_t_9 = ((__pyx_v_sent_len > __pyx_v_13average_inner_ZEROF) != 0);
+ if (__pyx_t_9) {
+
+ /* "average_inner.pyx":310
+ *
+ * if sent_len > ZEROF:
+ * inv_count = ONEF / sent_len # <<<<<<<<<<<<<<
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE)
+ *
+ */
+ __pyx_v_inv_count = (__pyx_v_13average_inner_ONEF / __pyx_v_sent_len);
+
+ /* "average_inner.pyx":311
+ * if sent_len > ZEROF:
+ * inv_count = ONEF / sent_len
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE) # <<<<<<<<<<<<<<
+ *
+ * def train_average_cy(model, indexed_sentences, target, memory):
+ */
+ __pyx_v_13average_inner_saxpy((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_c->mem, (&__pyx_v_13average_inner_ONE), (&(__pyx_v_c->sentence_vectors[__pyx_v_sent_row])), (&__pyx_v_13average_inner_ONE));
+
+ /* "average_inner.pyx":309
+ * saxpy(&size, &inv_ngram, &c.ngram_vectors[ngram_row], &ONE, c.mem, &ONE)
+ *
+ * if sent_len > ZEROF: # <<<<<<<<<<<<<<
+ * inv_count = ONEF / sent_len
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE)
+ */
+ }
+ }
+
+ /* "average_inner.pyx":258
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE)
+ *
+ * cdef void compute_ft_sentence_averages(FTSentenceVecsConfig *c, uINT_t num_sentences) nogil: # <<<<<<<<<<<<<<
+ * """Perform optimized sentence-level averaging for FastText model.
+ *
+ */
+
+ /* function exit code */
+}
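+/* compute_ft_sentence_averages (above): the same averaging loop, except the
+ * per-word contribution depends on c.subwords_idx_len[i]. If it is zero the word
+ * is in-vocabulary and word_weights[word_idx] * word_vectors[word_row] is added
+ * to c.mem; otherwise the word is OOV and each of its ngram vectors is added
+ * scaled by (1 / ngrams) * c.oov_weight. The accumulated c.mem is then scaled by
+ * 1 / sent_len and added onto c.sentence_vectors[sent_row].
+ */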
+
+/* "average_inner.pyx":313
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE)
+ *
+ * def train_average_cy(model, indexed_sentences, target, memory): # <<<<<<<<<<<<<<
+ * """Training on a sequence of sentences and update the target ndarray.
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_13average_inner_1train_average_cy(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_13average_inner_train_average_cy[] = "train_average_cy(model, indexed_sentences, target, memory)\nTraining on a sequence of sentences and update the target ndarray.\n\n Called internally from :meth:`~fse.models.average.Average._do_train_job`.\n\n Parameters\n ----------\n model : :class:`~fse.models.base_s2v.BaseSentence2VecModel`\n The BaseSentence2VecModel model instance.\n indexed_sentences : iterable of tuple\n The sentences used to train the model.\n target : ndarray\n The target ndarray. We use the index from indexed_sentences\n to write into the corresponding row of target.\n memory : ndarray\n Private memory for each working thread.\n\n Returns\n -------\n int, int\n Number of effective sentences (non-zero) and effective words in the vocabulary used \n during training the sentence embedding.\n ";
+static PyMethodDef __pyx_mdef_13average_inner_1train_average_cy = {"train_average_cy", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_13average_inner_1train_average_cy, METH_VARARGS|METH_KEYWORDS, __pyx_doc_13average_inner_train_average_cy};
+static PyObject *__pyx_pw_13average_inner_1train_average_cy(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_model = 0;
+ PyObject *__pyx_v_indexed_sentences = 0;
+ PyObject *__pyx_v_target = 0;
+ PyObject *__pyx_v_memory = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("train_average_cy (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_indexed_sentences,&__pyx_n_s_target,&__pyx_n_s_memory,0};
+ PyObject* values[4] = {0,0,0,0};
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ CYTHON_FALLTHROUGH;
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ CYTHON_FALLTHROUGH;
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ CYTHON_FALLTHROUGH;
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ CYTHON_FALLTHROUGH;
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ CYTHON_FALLTHROUGH;
+ case 1:
+ if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_indexed_sentences)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("train_average_cy", 1, 4, 4, 1); __PYX_ERR(0, 313, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 2:
+ if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_target)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("train_average_cy", 1, 4, 4, 2); __PYX_ERR(0, 313, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 3:
+ if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_memory)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("train_average_cy", 1, 4, 4, 3); __PYX_ERR(0, 313, __pyx_L3_error)
+ }
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_average_cy") < 0)) __PYX_ERR(0, 313, __pyx_L3_error)
+ }
+ } else if (PyTuple_GET_SIZE(__pyx_args) != 4) {
+ goto __pyx_L5_argtuple_error;
+ } else {
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ }
+ __pyx_v_model = values[0];
+ __pyx_v_indexed_sentences = values[1];
+ __pyx_v_target = values[2];
+ __pyx_v_memory = values[3];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("train_average_cy", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 313, __pyx_L3_error)
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("average_inner.train_average_cy", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return NULL;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_13average_inner_train_average_cy(__pyx_self, __pyx_v_model, __pyx_v_indexed_sentences, __pyx_v_target, __pyx_v_memory);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_13average_inner_train_average_cy(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_indexed_sentences, PyObject *__pyx_v_target, PyObject *__pyx_v_memory) {
+ __pyx_t_13average_inner_uINT_t __pyx_v_eff_sentences;
+ __pyx_t_13average_inner_uINT_t __pyx_v_eff_words;
+ struct __pyx_t_13average_inner_BaseSentenceVecsConfig __pyx_v_w2v;
+ struct __pyx_t_13average_inner_FTSentenceVecsConfig __pyx_v_ft;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ PyObject *(*__pyx_t_7)(PyObject *);
+ __pyx_t_13average_inner_uINT_t __pyx_t_8;
+ __pyx_t_13average_inner_uINT_t __pyx_t_9;
+ __Pyx_RefNannySetupContext("train_average_cy", 0);
+
+ /* "average_inner.pyx":337
+ * """
+ *
+ * cdef uINT_t eff_sentences = 0 # <<<<<<<<<<<<<<
+ * cdef uINT_t eff_words = 0
+ * cdef BaseSentenceVecsConfig w2v
+ */
+ __pyx_v_eff_sentences = 0;
+
+ /* "average_inner.pyx":338
+ *
+ * cdef uINT_t eff_sentences = 0
+ * cdef uINT_t eff_words = 0 # <<<<<<<<<<<<<<
+ * cdef BaseSentenceVecsConfig w2v
+ * cdef FTSentenceVecsConfig ft
+ */
+ __pyx_v_eff_words = 0;
+
+ /* "average_inner.pyx":342
+ * cdef FTSentenceVecsConfig ft
+ *
+ * if not model.is_ft: # <<<<<<<<<<<<<<
+ * init_base_s2v_config(&w2v, model, target, memory)
+ *
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_is_ft); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 342, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 342, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = ((!__pyx_t_2) != 0);
+ if (__pyx_t_3) {
+
+ /* "average_inner.pyx":343
+ *
+ * if not model.is_ft:
+ * init_base_s2v_config(&w2v, model, target, memory) # <<<<<<<<<<<<<<
+ *
+ * eff_sentences, eff_words = populate_base_s2v_config(&w2v, model.wv.vocab, indexed_sentences)
+ */
+ __pyx_t_1 = __pyx_f_13average_inner_init_base_s2v_config((&__pyx_v_w2v), __pyx_v_model, __pyx_v_target, __pyx_v_memory); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 343, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":345
+ * init_base_s2v_config(&w2v, model, target, memory)
+ *
+ * eff_sentences, eff_words = populate_base_s2v_config(&w2v, model.wv.vocab, indexed_sentences) # <<<<<<<<<<<<<<
+ *
+ * with nogil:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __pyx_f_13average_inner_populate_base_s2v_config((&__pyx_v_w2v), __pyx_t_4, __pyx_v_indexed_sentences); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) {
+ PyObject* sequence = __pyx_t_1;
+ Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
+ if (unlikely(size != 2)) {
+ if (size > 2) __Pyx_RaiseTooManyValuesError(2);
+ else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
+ __PYX_ERR(0, 345, __pyx_L1_error)
+ }
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ if (likely(PyTuple_CheckExact(sequence))) {
+ __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0);
+ __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1);
+ } else {
+ __pyx_t_4 = PyList_GET_ITEM(sequence, 0);
+ __pyx_t_5 = PyList_GET_ITEM(sequence, 1);
+ }
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_t_5);
+ #else
+ __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ #endif
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ } else {
+ Py_ssize_t index = -1;
+ __pyx_t_6 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext;
+ index = 0; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L4_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_4);
+ index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L4_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_5);
+ if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 345, __pyx_L1_error)
+ __pyx_t_7 = NULL;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ goto __pyx_L5_unpacking_done;
+ __pyx_L4_unpacking_failed:;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_t_7 = NULL;
+ if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);
+ __PYX_ERR(0, 345, __pyx_L1_error)
+ __pyx_L5_unpacking_done:;
+ }
+ __pyx_t_8 = __Pyx_PyInt_As_npy_uint32(__pyx_t_4); if (unlikely((__pyx_t_8 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_9 = __Pyx_PyInt_As_npy_uint32(__pyx_t_5); if (unlikely((__pyx_t_9 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 345, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_v_eff_sentences = __pyx_t_8;
+ __pyx_v_eff_words = __pyx_t_9;
+
+ /* "average_inner.pyx":347
+ * eff_sentences, eff_words = populate_base_s2v_config(&w2v, model.wv.vocab, indexed_sentences)
+ *
+ * with nogil: # <<<<<<<<<<<<<<
+ * compute_base_sentence_averages(&w2v, eff_sentences)
+ * else:
+ */
+ {
+ #ifdef WITH_THREAD
+ PyThreadState *_save;
+ Py_UNBLOCK_THREADS
+ __Pyx_FastGIL_Remember();
+ #endif
+ /*try:*/ {
+
+ /* "average_inner.pyx":348
+ *
+ * with nogil:
+ * compute_base_sentence_averages(&w2v, eff_sentences) # <<<<<<<<<<<<<<
+ * else:
+ * init_ft_s2v_config(&ft, model, target, memory)
+ */
+ __pyx_f_13average_inner_compute_base_sentence_averages((&__pyx_v_w2v), __pyx_v_eff_sentences);
+ }
+
+ /* "average_inner.pyx":347
+ * eff_sentences, eff_words = populate_base_s2v_config(&w2v, model.wv.vocab, indexed_sentences)
+ *
+ * with nogil: # <<<<<<<<<<<<<<
+ * compute_base_sentence_averages(&w2v, eff_sentences)
+ * else:
+ */
+ /*finally:*/ {
+ /*normal exit:*/{
+ #ifdef WITH_THREAD
+ __Pyx_FastGIL_Forget();
+ Py_BLOCK_THREADS
+ #endif
+ goto __pyx_L8;
+ }
+ __pyx_L8:;
+ }
+ }
+
+ /* "average_inner.pyx":342
+ * cdef FTSentenceVecsConfig ft
+ *
+ * if not model.is_ft: # <<<<<<<<<<<<<<
+ * init_base_s2v_config(&w2v, model, target, memory)
+ *
+ */
+ goto __pyx_L3;
+ }
+
+ /* "average_inner.pyx":350
+ * compute_base_sentence_averages(&w2v, eff_sentences)
+ * else:
+ * init_ft_s2v_config(&ft, model, target, memory) # <<<<<<<<<<<<<<
+ *
+ * eff_sentences, eff_words = populate_ft_s2v_config(&ft, model.wv.vocab, indexed_sentences)
+ */
+ /*else*/ {
+ __pyx_t_1 = __pyx_f_13average_inner_init_ft_s2v_config((&__pyx_v_ft), __pyx_v_model, __pyx_v_target, __pyx_v_memory); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 350, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "average_inner.pyx":352
+ * init_ft_s2v_config(&ft, model, target, memory)
+ *
+ * eff_sentences, eff_words = populate_ft_s2v_config(&ft, model.wv.vocab, indexed_sentences) # <<<<<<<<<<<<<<
+ *
+ * with nogil:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __pyx_f_13average_inner_populate_ft_s2v_config((&__pyx_v_ft), __pyx_t_5, __pyx_v_indexed_sentences); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) {
+ PyObject* sequence = __pyx_t_1;
+ Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
+ if (unlikely(size != 2)) {
+ if (size > 2) __Pyx_RaiseTooManyValuesError(2);
+ else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
+ __PYX_ERR(0, 352, __pyx_L1_error)
+ }
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ if (likely(PyTuple_CheckExact(sequence))) {
+ __pyx_t_5 = PyTuple_GET_ITEM(sequence, 0);
+ __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1);
+ } else {
+ __pyx_t_5 = PyList_GET_ITEM(sequence, 0);
+ __pyx_t_4 = PyList_GET_ITEM(sequence, 1);
+ }
+ __Pyx_INCREF(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_4);
+ #else
+ __pyx_t_5 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ } else {
+ Py_ssize_t index = -1;
+ __pyx_t_6 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext;
+ index = 0; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L9_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_5);
+ index = 1; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L9_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 352, __pyx_L1_error)
+ __pyx_t_7 = NULL;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ goto __pyx_L10_unpacking_done;
+ __pyx_L9_unpacking_failed:;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_t_7 = NULL;
+ if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);
+ __PYX_ERR(0, 352, __pyx_L1_error)
+ __pyx_L10_unpacking_done:;
+ }
+ __pyx_t_9 = __Pyx_PyInt_As_npy_uint32(__pyx_t_5); if (unlikely((__pyx_t_9 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_8 = __Pyx_PyInt_As_npy_uint32(__pyx_t_4); if (unlikely((__pyx_t_8 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_eff_sentences = __pyx_t_9;
+ __pyx_v_eff_words = __pyx_t_8;
+
+ /* "average_inner.pyx":354
+ * eff_sentences, eff_words = populate_ft_s2v_config(&ft, model.wv.vocab, indexed_sentences)
+ *
+ * with nogil: # <<<<<<<<<<<<<<
+ * compute_ft_sentence_averages(&ft, eff_sentences)
+ *
+ */
+ {
+ #ifdef WITH_THREAD
+ PyThreadState *_save;
+ Py_UNBLOCK_THREADS
+ __Pyx_FastGIL_Remember();
+ #endif
+ /*try:*/ {
+
+ /* "average_inner.pyx":355
+ *
+ * with nogil:
+ * compute_ft_sentence_averages(&ft, eff_sentences) # <<<<<<<<<<<<<<
+ *
+ * return eff_sentences, eff_words
+ */
+ __pyx_f_13average_inner_compute_ft_sentence_averages((&__pyx_v_ft), __pyx_v_eff_sentences);
+ }
+
+ /* "average_inner.pyx":354
+ * eff_sentences, eff_words = populate_ft_s2v_config(&ft, model.wv.vocab, indexed_sentences)
+ *
+ * with nogil: # <<<<<<<<<<<<<<
+ * compute_ft_sentence_averages(&ft, eff_sentences)
+ *
+ */
+ /*finally:*/ {
+ /*normal exit:*/{
+ #ifdef WITH_THREAD
+ __Pyx_FastGIL_Forget();
+ Py_BLOCK_THREADS
+ #endif
+ goto __pyx_L13;
+ }
+ __pyx_L13:;
+ }
+ }
+ }
+ __pyx_L3:;
+
+ /* "average_inner.pyx":357
+ * compute_ft_sentence_averages(&ft, eff_sentences)
+ *
+ * return eff_sentences, eff_words # <<<<<<<<<<<<<<
+ *
+ * def init():
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_npy_uint32(__pyx_v_eff_sentences); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 357, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = __Pyx_PyInt_From_npy_uint32(__pyx_v_eff_words); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 357, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 357, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_4);
+ __pyx_t_1 = 0;
+ __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_5;
+ __pyx_t_5 = 0;
+ goto __pyx_L0;
+
+ /* "average_inner.pyx":313
+ * saxpy(&size, &inv_count, c.mem, &ONE, &c.sentence_vectors[sent_row], &ONE)
+ *
+ * def train_average_cy(model, indexed_sentences, target, memory): # <<<<<<<<<<<<<<
+ * """Training on a sequence of sentences and update the target ndarray.
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_AddTraceback("average_inner.train_average_cy", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "average_inner.pyx":359
+ * return eff_sentences, eff_words
+ *
+ * def init(): # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_13average_inner_3init(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static char __pyx_doc_13average_inner_2init[] = "init()";
+static PyMethodDef __pyx_mdef_13average_inner_3init = {"init", (PyCFunction)__pyx_pw_13average_inner_3init, METH_NOARGS, __pyx_doc_13average_inner_2init};
+static PyObject *__pyx_pw_13average_inner_3init(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("init (wrapper)", 0);
+ __pyx_r = __pyx_pf_13average_inner_2init(__pyx_self);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_13average_inner_2init(CYTHON_UNUSED PyObject *__pyx_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("init", 0);
+
+ /* "average_inner.pyx":360
+ *
+ * def init():
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * MAX_WORDS_IN_BATCH = MAX_WORDS
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_int_1);
+ __pyx_r = __pyx_int_1;
+ goto __pyx_L0;
+
+ /* "average_inner.pyx":359
+ * return eff_sentences, eff_words
+ *
+ * def init(): # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":258
+ * # experimental exception made for __getbuffer__ and __releasebuffer__
+ * # -- the details of this may change.
+ * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<<
+ * # This implementation of getbuffer is geared towards Cython
+ * # requirements, and does not yet fulfill the PEP.
+ */
+
+/* Python wrapper */
+static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/
+static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
+ int __pyx_v_i;
+ int __pyx_v_ndim;
+ int __pyx_v_endian_detector;
+ int __pyx_v_little_endian;
+ int __pyx_v_t;
+ char *__pyx_v_f;
+ PyArray_Descr *__pyx_v_descr = 0;
+ int __pyx_v_offset;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_t_4;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyArray_Descr *__pyx_t_7;
+ PyObject *__pyx_t_8 = NULL;
+ char *__pyx_t_9;
+ if (__pyx_v_info == NULL) {
+ PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete");
+ return -1;
+ }
+ __Pyx_RefNannySetupContext("__getbuffer__", 0);
+ __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(__pyx_v_info->obj);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":265
+ *
+ * cdef int i, ndim
+ * cdef int endian_detector = 1 # <<<<<<<<<<<<<<
+ * cdef bint little_endian = ((&endian_detector)[0] != 0)
+ *
+ */
+ __pyx_v_endian_detector = 1;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":266
+ * cdef int i, ndim
+ * cdef int endian_detector = 1
+ * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<<
+ *
+ * ndim = PyArray_NDIM(self)
+ */
+ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":268
+ * cdef bint little_endian = ((&endian_detector)[0] != 0)
+ *
+ * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<<
+ *
+ * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
+ */
+ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":270
+ * ndim = PyArray_NDIM(self)
+ *
+ * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<<
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not C contiguous")
+ */
+ __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0);
+ if (__pyx_t_2) {
+ } else {
+ __pyx_t_1 = __pyx_t_2;
+ goto __pyx_L4_bool_binop_done;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":271
+ *
+ * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): # <<<<<<<<<<<<<<
+ * raise ValueError(u"ndarray is not C contiguous")
+ *
+ */
+ __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_C_CONTIGUOUS) != 0)) != 0);
+ __pyx_t_1 = __pyx_t_2;
+ __pyx_L4_bool_binop_done:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":270
+ * ndim = PyArray_NDIM(self)
+ *
+ * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<<
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not C contiguous")
+ */
+ if (unlikely(__pyx_t_1)) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":272
+ * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<<
+ *
+ * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 272, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(1, 272, __pyx_L1_error)
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":270
+ * ndim = PyArray_NDIM(self)
+ *
+ * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<<
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not C contiguous")
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":274
+ * raise ValueError(u"ndarray is not C contiguous")
+ *
+ * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<<
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not Fortran contiguous")
+ */
+ __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0);
+ if (__pyx_t_2) {
+ } else {
+ __pyx_t_1 = __pyx_t_2;
+ goto __pyx_L7_bool_binop_done;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":275
+ *
+ * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): # <<<<<<<<<<<<<<
+ * raise ValueError(u"ndarray is not Fortran contiguous")
+ *
+ */
+ __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_F_CONTIGUOUS) != 0)) != 0);
+ __pyx_t_1 = __pyx_t_2;
+ __pyx_L7_bool_binop_done:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":274
+ * raise ValueError(u"ndarray is not C contiguous")
+ *
+ * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<<
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not Fortran contiguous")
+ */
+ if (unlikely(__pyx_t_1)) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":276
+ * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<<
+ *
+ * info.buf = PyArray_DATA(self)
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 276, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(1, 276, __pyx_L1_error)
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":274
+ * raise ValueError(u"ndarray is not C contiguous")
+ *
+ * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<<
+ * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
+ * raise ValueError(u"ndarray is not Fortran contiguous")
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":278
+ * raise ValueError(u"ndarray is not Fortran contiguous")
+ *
+ * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<<
+ * info.ndim = ndim
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t):
+ */
+ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":279
+ *
+ * info.buf = PyArray_DATA(self)
+ * info.ndim = ndim # <<<<<<<<<<<<<<
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t):
+ * # Allocate new buffer for strides and shape info.
+ */
+ __pyx_v_info->ndim = __pyx_v_ndim;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":280
+ * info.buf = PyArray_DATA(self)
+ * info.ndim = ndim
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
+ * # Allocate new buffer for strides and shape info.
+ * # This is allocated as one block, strides first.
+ */
+ __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);
+ if (__pyx_t_1) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":283
+ * # Allocate new buffer for strides and shape info.
+ * # This is allocated as one block, strides first.
+ * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<<
+ * info.shape = info.strides + ndim
+ * for i in range(ndim):
+ */
+ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim))));
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":284
+ * # This is allocated as one block, strides first.
+ * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim)
+ * info.shape = info.strides + ndim # <<<<<<<<<<<<<<
+ * for i in range(ndim):
+ * info.strides[i] = PyArray_STRIDES(self)[i]
+ */
+ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":285
+ * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim)
+ * info.shape = info.strides + ndim
+ * for i in range(ndim): # <<<<<<<<<<<<<<
+ * info.strides[i] = PyArray_STRIDES(self)[i]
+ * info.shape[i] = PyArray_DIMS(self)[i]
+ */
+ __pyx_t_4 = __pyx_v_ndim;
+ __pyx_t_5 = __pyx_t_4;
+ for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) {
+ __pyx_v_i = __pyx_t_6;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":286
+ * info.shape = info.strides + ndim
+ * for i in range(ndim):
+ * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<<
+ * info.shape[i] = PyArray_DIMS(self)[i]
+ * else:
+ */
+ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":287
+ * for i in range(ndim):
+ * info.strides[i] = PyArray_STRIDES(self)[i]
+ * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<<
+ * else:
+ * info.strides = PyArray_STRIDES(self)
+ */
+ (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]);
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":280
+ * info.buf = PyArray_DATA(self)
+ * info.ndim = ndim
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
+ * # Allocate new buffer for strides and shape info.
+ * # This is allocated as one block, strides first.
+ */
+ goto __pyx_L9;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":289
+ * info.shape[i] = PyArray_DIMS(self)[i]
+ * else:
+ * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<<
+ * info.shape = PyArray_DIMS(self)
+ * info.suboffsets = NULL
+ */
+ /*else*/ {
+ __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self));
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":290
+ * else:
+ * info.strides = PyArray_STRIDES(self)
+ * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<<
+ * info.suboffsets = NULL
+ * info.itemsize = PyArray_ITEMSIZE(self)
+ */
+ __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self));
+ }
+ __pyx_L9:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":291
+ * info.strides = PyArray_STRIDES(self)
+ * info.shape = PyArray_DIMS(self)
+ * info.suboffsets = NULL # <<<<<<<<<<<<<<
+ * info.itemsize = PyArray_ITEMSIZE(self)
+ * info.readonly = not PyArray_ISWRITEABLE(self)
+ */
+ __pyx_v_info->suboffsets = NULL;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":292
+ * info.shape = PyArray_DIMS(self)
+ * info.suboffsets = NULL
+ * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<<
+ * info.readonly = not PyArray_ISWRITEABLE(self)
+ *
+ */
+ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":293
+ * info.suboffsets = NULL
+ * info.itemsize = PyArray_ITEMSIZE(self)
+ * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<<
+ *
+ * cdef int t
+ */
+ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0));
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":296
+ *
+ * cdef int t
+ * cdef char* f = NULL # <<<<<<<<<<<<<<
+ * cdef dtype descr = PyArray_DESCR(self)
+ * cdef int offset
+ */
+ __pyx_v_f = NULL;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":297
+ * cdef int t
+ * cdef char* f = NULL
+ * cdef dtype descr = PyArray_DESCR(self) # <<<<<<<<<<<<<<
+ * cdef int offset
+ *
+ */
+ __pyx_t_7 = PyArray_DESCR(__pyx_v_self);
+ __pyx_t_3 = ((PyObject *)__pyx_t_7);
+ __Pyx_INCREF(__pyx_t_3);
+ __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":300
+ * cdef int offset
+ *
+ * info.obj = self # <<<<<<<<<<<<<<
+ *
+ * if not PyDataType_HASFIELDS(descr):
+ */
+ __Pyx_INCREF(((PyObject *)__pyx_v_self));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_self));
+ __Pyx_GOTREF(__pyx_v_info->obj);
+ __Pyx_DECREF(__pyx_v_info->obj);
+ __pyx_v_info->obj = ((PyObject *)__pyx_v_self);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":302
+ * info.obj = self
+ *
+ * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<<
+ * t = descr.type_num
+ * if ((descr.byteorder == c'>' and little_endian) or
+ */
+ __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0);
+ if (__pyx_t_1) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":303
+ *
+ * if not PyDataType_HASFIELDS(descr):
+ * t = descr.type_num # <<<<<<<<<<<<<<
+ * if ((descr.byteorder == c'>' and little_endian) or
+ * (descr.byteorder == c'<' and not little_endian)):
+ */
+ __pyx_t_4 = __pyx_v_descr->type_num;
+ __pyx_v_t = __pyx_t_4;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":304
+ * if not PyDataType_HASFIELDS(descr):
+ * t = descr.type_num
+ * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
+ * (descr.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported")
+ */
+ __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0);
+ if (!__pyx_t_2) {
+ goto __pyx_L15_next_or;
+ } else {
+ }
+ __pyx_t_2 = (__pyx_v_little_endian != 0);
+ if (!__pyx_t_2) {
+ } else {
+ __pyx_t_1 = __pyx_t_2;
+ goto __pyx_L14_bool_binop_done;
+ }
+ __pyx_L15_next_or:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":305
+ * t = descr.type_num
+ * if ((descr.byteorder == c'>' and little_endian) or
+ * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<<
+ * raise ValueError(u"Non-native byte order not supported")
+ * if t == NPY_BYTE: f = "b"
+ */
+ __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0);
+ if (__pyx_t_2) {
+ } else {
+ __pyx_t_1 = __pyx_t_2;
+ goto __pyx_L14_bool_binop_done;
+ }
+ __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0);
+ __pyx_t_1 = __pyx_t_2;
+ __pyx_L14_bool_binop_done:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":304
+ * if not PyDataType_HASFIELDS(descr):
+ * t = descr.type_num
+ * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
+ * (descr.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported")
+ */
+ if (unlikely(__pyx_t_1)) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":306
+ * if ((descr.byteorder == c'>' and little_endian) or
+ * (descr.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<<
+ * if t == NPY_BYTE: f = "b"
+ * elif t == NPY_UBYTE: f = "B"
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 306, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(1, 306, __pyx_L1_error)
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":304
+ * if not PyDataType_HASFIELDS(descr):
+ * t = descr.type_num
+ * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
+ * (descr.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported")
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":307
+ * (descr.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported")
+ * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<<
+ * elif t == NPY_UBYTE: f = "B"
+ * elif t == NPY_SHORT: f = "h"
+ */
+ switch (__pyx_v_t) {
+ case NPY_BYTE:
+ __pyx_v_f = ((char *)"b");
+ break;
+ case NPY_UBYTE:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":308
+ * raise ValueError(u"Non-native byte order not supported")
+ * if t == NPY_BYTE: f = "b"
+ * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<<
+ * elif t == NPY_SHORT: f = "h"
+ * elif t == NPY_USHORT: f = "H"
+ */
+ __pyx_v_f = ((char *)"B");
+ break;
+ case NPY_SHORT:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":309
+ * if t == NPY_BYTE: f = "b"
+ * elif t == NPY_UBYTE: f = "B"
+ * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<<
+ * elif t == NPY_USHORT: f = "H"
+ * elif t == NPY_INT: f = "i"
+ */
+ __pyx_v_f = ((char *)"h");
+ break;
+ case NPY_USHORT:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":310
+ * elif t == NPY_UBYTE: f = "B"
+ * elif t == NPY_SHORT: f = "h"
+ * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<<
+ * elif t == NPY_INT: f = "i"
+ * elif t == NPY_UINT: f = "I"
+ */
+ __pyx_v_f = ((char *)"H");
+ break;
+ case NPY_INT:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":311
+ * elif t == NPY_SHORT: f = "h"
+ * elif t == NPY_USHORT: f = "H"
+ * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<<
+ * elif t == NPY_UINT: f = "I"
+ * elif t == NPY_LONG: f = "l"
+ */
+ __pyx_v_f = ((char *)"i");
+ break;
+ case NPY_UINT:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":312
+ * elif t == NPY_USHORT: f = "H"
+ * elif t == NPY_INT: f = "i"
+ * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<<
+ * elif t == NPY_LONG: f = "l"
+ * elif t == NPY_ULONG: f = "L"
+ */
+ __pyx_v_f = ((char *)"I");
+ break;
+ case NPY_LONG:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":313
+ * elif t == NPY_INT: f = "i"
+ * elif t == NPY_UINT: f = "I"
+ * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<<
+ * elif t == NPY_ULONG: f = "L"
+ * elif t == NPY_LONGLONG: f = "q"
+ */
+ __pyx_v_f = ((char *)"l");
+ break;
+ case NPY_ULONG:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":314
+ * elif t == NPY_UINT: f = "I"
+ * elif t == NPY_LONG: f = "l"
+ * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<<
+ * elif t == NPY_LONGLONG: f = "q"
+ * elif t == NPY_ULONGLONG: f = "Q"
+ */
+ __pyx_v_f = ((char *)"L");
+ break;
+ case NPY_LONGLONG:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":315
+ * elif t == NPY_LONG: f = "l"
+ * elif t == NPY_ULONG: f = "L"
+ * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<<
+ * elif t == NPY_ULONGLONG: f = "Q"
+ * elif t == NPY_FLOAT: f = "f"
+ */
+ __pyx_v_f = ((char *)"q");
+ break;
+ case NPY_ULONGLONG:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":316
+ * elif t == NPY_ULONG: f = "L"
+ * elif t == NPY_LONGLONG: f = "q"
+ * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<<
+ * elif t == NPY_FLOAT: f = "f"
+ * elif t == NPY_DOUBLE: f = "d"
+ */
+ __pyx_v_f = ((char *)"Q");
+ break;
+ case NPY_FLOAT:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":317
+ * elif t == NPY_LONGLONG: f = "q"
+ * elif t == NPY_ULONGLONG: f = "Q"
+ * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<<
+ * elif t == NPY_DOUBLE: f = "d"
+ * elif t == NPY_LONGDOUBLE: f = "g"
+ */
+ __pyx_v_f = ((char *)"f");
+ break;
+ case NPY_DOUBLE:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":318
+ * elif t == NPY_ULONGLONG: f = "Q"
+ * elif t == NPY_FLOAT: f = "f"
+ * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<<
+ * elif t == NPY_LONGDOUBLE: f = "g"
+ * elif t == NPY_CFLOAT: f = "Zf"
+ */
+ __pyx_v_f = ((char *)"d");
+ break;
+ case NPY_LONGDOUBLE:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":319
+ * elif t == NPY_FLOAT: f = "f"
+ * elif t == NPY_DOUBLE: f = "d"
+ * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<<
+ * elif t == NPY_CFLOAT: f = "Zf"
+ * elif t == NPY_CDOUBLE: f = "Zd"
+ */
+ __pyx_v_f = ((char *)"g");
+ break;
+ case NPY_CFLOAT:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":320
+ * elif t == NPY_DOUBLE: f = "d"
+ * elif t == NPY_LONGDOUBLE: f = "g"
+ * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<<
+ * elif t == NPY_CDOUBLE: f = "Zd"
+ * elif t == NPY_CLONGDOUBLE: f = "Zg"
+ */
+ __pyx_v_f = ((char *)"Zf");
+ break;
+ case NPY_CDOUBLE:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":321
+ * elif t == NPY_LONGDOUBLE: f = "g"
+ * elif t == NPY_CFLOAT: f = "Zf"
+ * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<<
+ * elif t == NPY_CLONGDOUBLE: f = "Zg"
+ * elif t == NPY_OBJECT: f = "O"
+ */
+ __pyx_v_f = ((char *)"Zd");
+ break;
+ case NPY_CLONGDOUBLE:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":322
+ * elif t == NPY_CFLOAT: f = "Zf"
+ * elif t == NPY_CDOUBLE: f = "Zd"
+ * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<<
+ * elif t == NPY_OBJECT: f = "O"
+ * else:
+ */
+ __pyx_v_f = ((char *)"Zg");
+ break;
+ case NPY_OBJECT:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":323
+ * elif t == NPY_CDOUBLE: f = "Zd"
+ * elif t == NPY_CLONGDOUBLE: f = "Zg"
+ * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
+ */
+ __pyx_v_f = ((char *)"O");
+ break;
+ default:
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":325
+ * elif t == NPY_OBJECT: f = "O"
+ * else:
+ * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<<
+ * info.format = f
+ * return
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 325, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_8 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 325, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 325, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(1, 325, __pyx_L1_error)
+ break;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":326
+ * else:
+ * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
+ * info.format = f # <<<<<<<<<<<<<<
+ * return
+ * else:
+ */
+ __pyx_v_info->format = __pyx_v_f;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":327
+ * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
+ * info.format = f
+ * return # <<<<<<<<<<<<<<
+ * else:
+ * info.format = PyObject_Malloc(_buffer_format_string_len)
+ */
+ __pyx_r = 0;
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":302
+ * info.obj = self
+ *
+ * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<<
+ * t = descr.type_num
+ * if ((descr.byteorder == c'>' and little_endian) or
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":329
+ * return
+ * else:
+ * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<<
+ * info.format[0] = c'^' # Native data types, manual alignment
+ * offset = 0
+ */
+ /*else*/ {
+ __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF));
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":330
+ * else:
+ * info.format = PyObject_Malloc(_buffer_format_string_len)
+ * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<<
+ * offset = 0
+ * f = _util_dtypestring(descr, info.format + 1,
+ */
+ (__pyx_v_info->format[0]) = '^';
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":331
+ * info.format = PyObject_Malloc(_buffer_format_string_len)
+ * info.format[0] = c'^' # Native data types, manual alignment
+ * offset = 0 # <<<<<<<<<<<<<<
+ * f = _util_dtypestring(descr, info.format + 1,
+ * info.format + _buffer_format_string_len,
+ */
+ __pyx_v_offset = 0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":332
+ * info.format[0] = c'^' # Native data types, manual alignment
+ * offset = 0
+ * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<<
+ * info.format + _buffer_format_string_len,
+ * &offset)
+ */
+ __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 332, __pyx_L1_error)
+ __pyx_v_f = __pyx_t_9;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":335
+ * info.format + _buffer_format_string_len,
+ * &offset)
+ * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<<
+ *
+ * def __releasebuffer__(ndarray self, Py_buffer* info):
+ */
+ (__pyx_v_f[0]) = '\x00';
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":258
+ * # experimental exception made for __getbuffer__ and __releasebuffer__
+ * # -- the details of this may change.
+ * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<<
+ * # This implementation of getbuffer is geared towards Cython
+ * # requirements, and does not yet fulfill the PEP.
+ */
+
+ /* function exit code */
+ __pyx_r = 0;
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = -1;
+ if (__pyx_v_info->obj != NULL) {
+ __Pyx_GOTREF(__pyx_v_info->obj);
+ __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
+ }
+ goto __pyx_L2;
+ __pyx_L0:;
+ if (__pyx_v_info->obj == Py_None) {
+ __Pyx_GOTREF(__pyx_v_info->obj);
+ __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
+ }
+ __pyx_L2:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_descr);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":337
+ * f[0] = c'\0' # Terminate format string
+ *
+ * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<<
+ * if PyArray_HASFIELDS(self):
+ * PyObject_Free(info.format)
+ */
+
+/* Python wrapper */
+static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/
+static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0);
+ __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) {
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ __Pyx_RefNannySetupContext("__releasebuffer__", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":338
+ *
+ * def __releasebuffer__(ndarray self, Py_buffer* info):
+ * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<<
+ * PyObject_Free(info.format)
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t):
+ */
+ __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0);
+ if (__pyx_t_1) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":339
+ * def __releasebuffer__(ndarray self, Py_buffer* info):
+ * if PyArray_HASFIELDS(self):
+ * PyObject_Free(info.format) # <<<<<<<<<<<<<<
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t):
+ * PyObject_Free(info.strides)
+ */
+ PyObject_Free(__pyx_v_info->format);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":338
+ *
+ * def __releasebuffer__(ndarray self, Py_buffer* info):
+ * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<<
+ * PyObject_Free(info.format)
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t):
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":340
+ * if PyArray_HASFIELDS(self):
+ * PyObject_Free(info.format)
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
+ * PyObject_Free(info.strides)
+ * # info.shape was stored after info.strides in the same block
+ */
+ __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);
+ if (__pyx_t_1) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":341
+ * PyObject_Free(info.format)
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t):
+ * PyObject_Free(info.strides) # <<<<<<<<<<<<<<
+ * # info.shape was stored after info.strides in the same block
+ *
+ */
+ PyObject_Free(__pyx_v_info->strides);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":340
+ * if PyArray_HASFIELDS(self):
+ * PyObject_Free(info.format)
+ * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<<
+ * PyObject_Free(info.strides)
+ * # info.shape was stored after info.strides in the same block
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":337
+ * f[0] = c'\0' # Terminate format string
+ *
+ * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<<
+ * if PyArray_HASFIELDS(self):
+ * PyObject_Free(info.format)
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":821
+ * ctypedef npy_cdouble complex_t
+ *
+ * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(1, a)
+ *
+ */
+
+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":822
+ *
+ * cdef inline object PyArray_MultiIterNew1(a):
+ * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<<
+ *
+ * cdef inline object PyArray_MultiIterNew2(a, b):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 822, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":821
+ * ctypedef npy_cdouble complex_t
+ *
+ * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(1, a)
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":824
+ * return PyArray_MultiIterNew(1, a)
+ *
+ * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(2, a, b)
+ *
+ */
+
+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":825
+ *
+ * cdef inline object PyArray_MultiIterNew2(a, b):
+ * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<<
+ *
+ * cdef inline object PyArray_MultiIterNew3(a, b, c):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 825, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":824
+ * return PyArray_MultiIterNew(1, a)
+ *
+ * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(2, a, b)
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":827
+ * return PyArray_MultiIterNew(2, a, b)
+ *
+ * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(3, a, b, c)
+ *
+ */
+
+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":828
+ *
+ * cdef inline object PyArray_MultiIterNew3(a, b, c):
+ * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<<
+ *
+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 828, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":827
+ * return PyArray_MultiIterNew(2, a, b)
+ *
+ * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(3, a, b, c)
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":830
+ * return PyArray_MultiIterNew(3, a, b, c)
+ *
+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(4, a, b, c, d)
+ *
+ */
+
+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":831
+ *
+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
+ * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<<
+ *
+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 831, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":830
+ * return PyArray_MultiIterNew(3, a, b, c)
+ *
+ * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(4, a, b, c, d)
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":833
+ * return PyArray_MultiIterNew(4, a, b, c, d)
+ *
+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
+ *
+ */
+
+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":834
+ *
+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
+ * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<<
+ *
+ * cdef inline tuple PyDataType_SHAPE(dtype d):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 834, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":833
+ * return PyArray_MultiIterNew(4, a, b, c, d)
+ *
+ * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
+ *
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":836
+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
+ *
+ * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
+ * if PyDataType_HASSUBARRAY(d):
+ * return d.subarray.shape
+ */
+
+static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":837
+ *
+ * cdef inline tuple PyDataType_SHAPE(dtype d):
+ * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<<
+ * return d.subarray.shape
+ * else:
+ */
+ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0);
+ if (__pyx_t_1) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":838
+ * cdef inline tuple PyDataType_SHAPE(dtype d):
+ * if PyDataType_HASSUBARRAY(d):
+ * return d.subarray.shape # <<<<<<<<<<<<<<
+ * else:
+ * return ()
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape));
+ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape);
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":837
+ *
+ * cdef inline tuple PyDataType_SHAPE(dtype d):
+ * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<<
+ * return d.subarray.shape
+ * else:
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":840
+ * return d.subarray.shape
+ * else:
+ * return () # <<<<<<<<<<<<<<
+ *
+ * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:
+ */
+ /*else*/ {
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_empty_tuple);
+ __pyx_r = __pyx_empty_tuple;
+ goto __pyx_L0;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":836
+ * return PyArray_MultiIterNew(5, a, b, c, d, e)
+ *
+ * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
+ * if PyDataType_HASSUBARRAY(d):
+ * return d.subarray.shape
+ */
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":842
+ * return ()
+ *
+ * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<<
+ * # Recursive utility function used in __getbuffer__ to get format
+ * # string. The new location in the format string is returned.
+ */
+
+static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) {
+ PyArray_Descr *__pyx_v_child = 0;
+ int __pyx_v_endian_detector;
+ int __pyx_v_little_endian;
+ PyObject *__pyx_v_fields = 0;
+ PyObject *__pyx_v_childname = NULL;
+ PyObject *__pyx_v_new_offset = NULL;
+ PyObject *__pyx_v_t = NULL;
+ char *__pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ Py_ssize_t __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ int __pyx_t_7;
+ long __pyx_t_8;
+ char *__pyx_t_9;
+ __Pyx_RefNannySetupContext("_util_dtypestring", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":847
+ *
+ * cdef dtype child
+ * cdef int endian_detector = 1 # <<<<<<<<<<<<<<
+ * cdef bint little_endian = ((&endian_detector)[0] != 0)
+ * cdef tuple fields
+ */
+ __pyx_v_endian_detector = 1;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":848
+ * cdef dtype child
+ * cdef int endian_detector = 1
+ * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<<
+ * cdef tuple fields
+ *
+ */
+ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":851
+ * cdef tuple fields
+ *
+ * for childname in descr.names: # <<<<<<<<<<<<<<
+ * fields = descr.fields[childname]
+ * child, new_offset = fields
+ */
+ if (unlikely(__pyx_v_descr->names == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable");
+ __PYX_ERR(1, 851, __pyx_L1_error)
+ }
+ __pyx_t_1 = __pyx_v_descr->names; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0;
+ for (;;) {
+ if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(1, 851, __pyx_L1_error)
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 851, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ #endif
+ __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":852
+ *
+ * for childname in descr.names:
+ * fields = descr.fields[childname] # <<<<<<<<<<<<<<
+ * child, new_offset = fields
+ *
+ */
+ if (unlikely(__pyx_v_descr->fields == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 852, __pyx_L1_error)
+ }
+ __pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 852, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(1, 852, __pyx_L1_error)
+ __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3));
+ __pyx_t_3 = 0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":853
+ * for childname in descr.names:
+ * fields = descr.fields[childname]
+ * child, new_offset = fields # <<<<<<<<<<<<<<
+ *
+ * if (end - f) - (new_offset - offset[0]) < 15:
+ */
+ if (likely(__pyx_v_fields != Py_None)) {
+ PyObject* sequence = __pyx_v_fields;
+ Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
+ if (unlikely(size != 2)) {
+ if (size > 2) __Pyx_RaiseTooManyValuesError(2);
+ else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
+ __PYX_ERR(1, 853, __pyx_L1_error)
+ }
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0);
+ __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ #else
+ __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 853, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 853, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ } else {
+ __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(1, 853, __pyx_L1_error)
+ }
+ if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(1, 853, __pyx_L1_error)
+ __Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3));
+ __pyx_t_3 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":855
+ * child, new_offset = fields
+ *
+ * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<<
+ * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
+ *
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 855, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 855, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 855, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0);
+ if (unlikely(__pyx_t_6)) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":856
+ *
+ * if (end - f) - (new_offset - offset[0]) < 15:
+ * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<<
+ *
+ * if ((child.byteorder == c'>' and little_endian) or
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 856, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(1, 856, __pyx_L1_error)
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":855
+ * child, new_offset = fields
+ *
+ * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<<
+ * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
+ *
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":858
+ * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
+ *
+ * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
+ * (child.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported")
+ */
+ __pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0);
+ if (!__pyx_t_7) {
+ goto __pyx_L8_next_or;
+ } else {
+ }
+ __pyx_t_7 = (__pyx_v_little_endian != 0);
+ if (!__pyx_t_7) {
+ } else {
+ __pyx_t_6 = __pyx_t_7;
+ goto __pyx_L7_bool_binop_done;
+ }
+ __pyx_L8_next_or:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":859
+ *
+ * if ((child.byteorder == c'>' and little_endian) or
+ * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<<
+ * raise ValueError(u"Non-native byte order not supported")
+ * # One could encode it in the format string and have Cython
+ */
+ __pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0);
+ if (__pyx_t_7) {
+ } else {
+ __pyx_t_6 = __pyx_t_7;
+ goto __pyx_L7_bool_binop_done;
+ }
+ __pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0);
+ __pyx_t_6 = __pyx_t_7;
+ __pyx_L7_bool_binop_done:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":858
+ * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
+ *
+ * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
+ * (child.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported")
+ */
+ if (unlikely(__pyx_t_6)) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":860
+ * if ((child.byteorder == c'>' and little_endian) or
+ * (child.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<<
+ * # One could encode it in the format string and have Cython
+ * # complain instead, BUT: < and > in format strings also imply
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 860, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(1, 860, __pyx_L1_error)
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":858
+ * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
+ *
+ * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
+ * (child.byteorder == c'<' and not little_endian)):
+ * raise ValueError(u"Non-native byte order not supported")
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":870
+ *
+ * # Output padding bytes
+ * while offset[0] < new_offset: # <<<<<<<<<<<<<<
+ * f[0] = 120 # "x"; pad byte
+ * f += 1
+ */
+ while (1) {
+ __pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 870, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 870, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 870, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (!__pyx_t_6) break;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":871
+ * # Output padding bytes
+ * while offset[0] < new_offset:
+ * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<<
+ * f += 1
+ * offset[0] += 1
+ */
+ (__pyx_v_f[0]) = 0x78;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":872
+ * while offset[0] < new_offset:
+ * f[0] = 120 # "x"; pad byte
+ * f += 1 # <<<<<<<<<<<<<<
+ * offset[0] += 1
+ *
+ */
+ __pyx_v_f = (__pyx_v_f + 1);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":873
+ * f[0] = 120 # "x"; pad byte
+ * f += 1
+ * offset[0] += 1 # <<<<<<<<<<<<<<
+ *
+ * offset[0] += child.itemsize
+ */
+ __pyx_t_8 = 0;
+ (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1);
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":875
+ * offset[0] += 1
+ *
+ * offset[0] += child.itemsize # <<<<<<<<<<<<<<
+ *
+ * if not PyDataType_HASFIELDS(child):
+ */
+ __pyx_t_8 = 0;
+ (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":877
+ * offset[0] += child.itemsize
+ *
+ * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<<
+ * t = child.type_num
+ * if end - f < 5:
+ */
+ __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0);
+ if (__pyx_t_6) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":878
+ *
+ * if not PyDataType_HASFIELDS(child):
+ * t = child.type_num # <<<<<<<<<<<<<<
+ * if end - f < 5:
+ * raise RuntimeError(u"Format string allocated too short.")
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 878, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":879
+ * if not PyDataType_HASFIELDS(child):
+ * t = child.type_num
+ * if end - f < 5: # <<<<<<<<<<<<<<
+ * raise RuntimeError(u"Format string allocated too short.")
+ *
+ */
+ __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0);
+ if (unlikely(__pyx_t_6)) {
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":880
+ * t = child.type_num
+ * if end - f < 5:
+ * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<<
+ *
+ * # Until ticket #99 is fixed, use integers to avoid warnings
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 880, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(1, 880, __pyx_L1_error)
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":879
+ * if not PyDataType_HASFIELDS(child):
+ * t = child.type_num
+ * if end - f < 5: # <<<<<<<<<<<<<<
+ * raise RuntimeError(u"Format string allocated too short.")
+ *
+ */
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":883
+ *
+ * # Until ticket #99 is fixed, use integers to avoid warnings
+ * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<<
+ * elif t == NPY_UBYTE: f[0] = 66 #"B"
+ * elif t == NPY_SHORT: f[0] = 104 #"h"
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 883, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 883, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 883, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 98;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":884
+ * # Until ticket #99 is fixed, use integers to avoid warnings
+ * if t == NPY_BYTE: f[0] = 98 #"b"
+ * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<<
+ * elif t == NPY_SHORT: f[0] = 104 #"h"
+ * elif t == NPY_USHORT: f[0] = 72 #"H"
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 884, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 884, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 884, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 66;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":885
+ * if t == NPY_BYTE: f[0] = 98 #"b"
+ * elif t == NPY_UBYTE: f[0] = 66 #"B"
+ * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<<
+ * elif t == NPY_USHORT: f[0] = 72 #"H"
+ * elif t == NPY_INT: f[0] = 105 #"i"
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 885, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 885, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 885, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 0x68;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":886
+ * elif t == NPY_UBYTE: f[0] = 66 #"B"
+ * elif t == NPY_SHORT: f[0] = 104 #"h"
+ * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<<
+ * elif t == NPY_INT: f[0] = 105 #"i"
+ * elif t == NPY_UINT: f[0] = 73 #"I"
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 886, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 886, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 886, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 72;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":887
+ * elif t == NPY_SHORT: f[0] = 104 #"h"
+ * elif t == NPY_USHORT: f[0] = 72 #"H"
+ * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<<
+ * elif t == NPY_UINT: f[0] = 73 #"I"
+ * elif t == NPY_LONG: f[0] = 108 #"l"
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 887, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 887, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 887, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 0x69;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":888
+ * elif t == NPY_USHORT: f[0] = 72 #"H"
+ * elif t == NPY_INT: f[0] = 105 #"i"
+ * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<<
+ * elif t == NPY_LONG: f[0] = 108 #"l"
+ * elif t == NPY_ULONG: f[0] = 76 #"L"
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 888, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 888, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 888, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 73;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":889
+ * elif t == NPY_INT: f[0] = 105 #"i"
+ * elif t == NPY_UINT: f[0] = 73 #"I"
+ * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<<
+ * elif t == NPY_ULONG: f[0] = 76 #"L"
+ * elif t == NPY_LONGLONG: f[0] = 113 #"q"
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 889, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 889, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 889, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 0x6C;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":890
+ * elif t == NPY_UINT: f[0] = 73 #"I"
+ * elif t == NPY_LONG: f[0] = 108 #"l"
+ * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<<
+ * elif t == NPY_LONGLONG: f[0] = 113 #"q"
+ * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 890, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 890, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 890, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 76;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":891
+ * elif t == NPY_LONG: f[0] = 108 #"l"
+ * elif t == NPY_ULONG: f[0] = 76 #"L"
+ * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<<
+ * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
+ * elif t == NPY_FLOAT: f[0] = 102 #"f"
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 891, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 891, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 891, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 0x71;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":892
+ * elif t == NPY_ULONG: f[0] = 76 #"L"
+ * elif t == NPY_LONGLONG: f[0] = 113 #"q"
+ * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<<
+ * elif t == NPY_FLOAT: f[0] = 102 #"f"
+ * elif t == NPY_DOUBLE: f[0] = 100 #"d"
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 892, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 892, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 892, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 81;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":893
+ * elif t == NPY_LONGLONG: f[0] = 113 #"q"
+ * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
+ * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<<
+ * elif t == NPY_DOUBLE: f[0] = 100 #"d"
+ * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 893, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 893, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 893, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 0x66;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":894
+ * elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
+ * elif t == NPY_FLOAT: f[0] = 102 #"f"
+ * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<<
+ * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
+ * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 894, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 894, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 894, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 0x64;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":895
+ * elif t == NPY_FLOAT: f[0] = 102 #"f"
+ * elif t == NPY_DOUBLE: f[0] = 100 #"d"
+ * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<<
+ * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
+ * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 895, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 895, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 895, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 0x67;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":896
+ * elif t == NPY_DOUBLE: f[0] = 100 #"d"
+ * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
+ * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<<
+ * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
+ * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 896, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 896, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 896, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 90;
+ (__pyx_v_f[1]) = 0x66;
+ __pyx_v_f = (__pyx_v_f + 1);
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":897
+ * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
+ * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
+ * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<<
+ * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
+ * elif t == NPY_OBJECT: f[0] = 79 #"O"
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 897, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 897, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 897, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 90;
+ (__pyx_v_f[1]) = 0x64;
+ __pyx_v_f = (__pyx_v_f + 1);
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":898
+ * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
+ * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
+ * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<<
+ * elif t == NPY_OBJECT: f[0] = 79 #"O"
+ * else:
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 898, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 898, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 898, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_6) {
+ (__pyx_v_f[0]) = 90;
+ (__pyx_v_f[1]) = 0x67;
+ __pyx_v_f = (__pyx_v_f + 1);
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":899
+ * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
+ * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
+ * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 899, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 899, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 899, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (likely(__pyx_t_6)) {
+ (__pyx_v_f[0]) = 79;
+ goto __pyx_L15;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":901
+ * elif t == NPY_OBJECT: f[0] = 79 #"O"
+ * else:
+ * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<<
+ * f += 1
+ * else:
+ */
+ /*else*/ {
+ __pyx_t_3 = __Pyx_PyUnicode_FormatSafe(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 901, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 901, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(1, 901, __pyx_L1_error)
+ }
+ __pyx_L15:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":902
+ * else:
+ * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
+ * f += 1 # <<<<<<<<<<<<<<
+ * else:
+ * # Cython ignores struct boundary information ("T{...}"),
+ */
+ __pyx_v_f = (__pyx_v_f + 1);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":877
+ * offset[0] += child.itemsize
+ *
+ * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<<
+ * t = child.type_num
+ * if end - f < 5:
+ */
+ goto __pyx_L13;
+ }
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":906
+ * # Cython ignores struct boundary information ("T{...}"),
+ * # so don't output it
+ * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<<
+ * return f
+ *
+ */
+ /*else*/ {
+ __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 906, __pyx_L1_error)
+ __pyx_v_f = __pyx_t_9;
+ }
+ __pyx_L13:;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":851
+ * cdef tuple fields
+ *
+ * for childname in descr.names: # <<<<<<<<<<<<<<
+ * fields = descr.fields[childname]
+ * child, new_offset = fields
+ */
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":907
+ * # so don't output it
+ * f = _util_dtypestring(child, f, end, offset)
+ * return f # <<<<<<<<<<<<<<
+ *
+ *
+ */
+ __pyx_r = __pyx_v_f;
+ goto __pyx_L0;
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":842
+ * return ()
+ *
+ * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<<
+ * # Recursive utility function used in __getbuffer__ to get format
+ * # string. The new location in the format string is returned.
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_child);
+ __Pyx_XDECREF(__pyx_v_fields);
+ __Pyx_XDECREF(__pyx_v_childname);
+ __Pyx_XDECREF(__pyx_v_new_offset);
+ __Pyx_XDECREF(__pyx_v_t);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":1022
+ * int _import_umath() except -1
+ *
+ * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<<
+ * Py_INCREF(base) # important to do this before stealing the reference below!
+ * PyArray_SetBaseObject(arr, base)
+ */
+
+static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("set_array_base", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":1023
+ *
+ * cdef inline void set_array_base(ndarray arr, object base):
+ * Py_INCREF(base) # important to do this before stealing the reference below! # <<<<<<<<<<<<<<
+ * PyArray_SetBaseObject(arr, base)
+ *
+ */
+ Py_INCREF(__pyx_v_base);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":1024
+ * cdef inline void set_array_base(ndarray arr, object base):
+ * Py_INCREF(base) # important to do this before stealing the reference below!
+ * PyArray_SetBaseObject(arr, base) # <<<<<<<<<<<<<<
+ *
+ * cdef inline object get_array_base(ndarray arr):
+ */
+ (void)(PyArray_SetBaseObject(__pyx_v_arr, __pyx_v_base));
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":1022
+ * int _import_umath() except -1
+ *
+ * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<<
+ * Py_INCREF(base) # important to do this before stealing the reference below!
+ * PyArray_SetBaseObject(arr, base)
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+/* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":1026
+ * PyArray_SetBaseObject(arr, base)
+ *
+ * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<<
+ * base = PyArray_BASE(arr)
+ * if base is NULL:
+ */
+
+static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) {
+ PyObject *__pyx_v_base;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ __Pyx_RefNannySetupContext("get_array_base", 0);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":1027
+ *
+ * cdef inline object get_array_base(ndarray arr):
+ * base = PyArray_BASE(arr) # <<<<<<<<<<<<<<
+ * if base is NULL:
+ * return None
+ */
+ __pyx_v_base = PyArray_BASE(__pyx_v_arr);
+
+ /* "../../../../../../../../anaconda3/envs/fsedev/lib/python3.7/site-packages/Cython/Includes/numpy/__init__.pxd":1028
+ * cdef inline object get_array_base(ndarray arr):
+ * base = PyArray_BASE(arr)
+ * if base is NULL: # <<<<<<<<<<<<<<
+ * return None
+ * return