github.com/apache/beam/sdks/v2@v2.48.2/python/apache_beam/transforms/cy_dataflow_distribution_counter.pxd

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# cython: profile=True

"""For internal use only. No backwards compatibility guarantees."""

cimport cython
from libc.stdint cimport int64_t


# 3 buckets for every power of ten -> 1, 2, 5
cdef enum:
  BUCKET_PER_TEN = 3

# Assuming the max input is max(int64_t), the maximum possible bucket count is 59
cdef enum:
  MAX_BUCKET_SIZE = 59

cdef class DataflowDistributionCounter(object):
  cdef public int64_t min
  cdef public int64_t max
  cdef public int64_t count
  cdef public int64_t sum
  cdef int64_t* buckets
  cdef public bint is_cythonized
  cpdef bint add_input(self, int64_t element) except -1
  cpdef bint add_input_n(self, int64_t element, int64_t n) except -1
  cdef int64_t _fast_calculate_bucket_index(self, int64_t element)
  cpdef void translate_to_histogram(self, histogram)
  cpdef bint add_inputs_for_test(self, elements) except -1
  cpdef int64_t calculate_bucket_index(self, int64_t element)
  cpdef tuple extract_output(self)
  cpdef merge(self, accumulators)
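
The header comments describe a 1-2-5 bucketing scheme: each power of ten contributes BUCKET_PER_TEN (= 3) histogram buckets, with boundaries 1, 2, 5, 10, 20, 50, and so on. As a rough illustration of how calculate_bucket_index could map a value onto that scheme, here is a hypothetical pure-Python sketch; the function name, the index formula, and the use of math.log10 are assumptions made for illustration, not the actual .pyx implementation (which declares a separate _fast_calculate_bucket_index for speed).

# Hypothetical sketch of the 1-2-5 bucketing scheme described above.
# Not the Beam implementation; for illustration only.
import math

BUCKET_PER_TEN = 3

def bucket_index(element: int) -> int:
    """Map a non-negative value to a 1-2-5 histogram bucket index."""
    if element <= 0:
        return 0
    # Which decade the value falls in: 10^k <= element < 10^(k+1).
    log10_floor = int(math.floor(math.log10(element)))
    power_of_ten = 10 ** log10_floor
    # Offset within the decade: [1, 2), [2, 5), or [5, 10) times 10^k.
    if element < 2 * power_of_ten:
        offset = 0
    elif element < 5 * power_of_ten:
        offset = 1
    else:
        offset = 2
    return 1 + BUCKET_PER_TEN * log10_floor + offset

# Under this formula, max(int64_t) ~ 9.2e18 lands at index
# 1 + 3 * 18 + 2 = 57, which fits within a MAX_BUCKET_SIZE = 59 array.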