PERF: cythonizing _concat_date_cols; conversion to float without exceptions in _does_string_look_like_datetime #25754
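
The "conversion to float without exceptions" part of the title refers to replacing the Python-level float() call guarded by try/except ValueError inside _does_string_look_like_datetime with a C xstrtod-style conversion that reports failure through an error parameter (see the "added error parameter for xstrtod call" commit below). A minimal pure-Python sketch of that calling convention; the helper name and the example caller are illustrative assumptions, not the PR's actual API:

    def to_float_with_flag(text):
        """Return (value, error) instead of raising, mimicking how a C
        xstrtod-style routine reports parse failure via an out-parameter."""
        try:
            return float(text), 0
        except ValueError:
            return 0.0, 1

    # The caller branches on the flag rather than catching an exception,
    # which is the cost the Cython rewrite avoids by calling xstrtod directly.
    value, error = to_float_with_flag("99")
    if not error and value < 1000:  # roughly mirroring the existing check's cutoff of 1000
        print("parses as a small plain number, so it does not look like a datetime")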

Merged (42 commits merged on May 12, 2019)

Commits (42)
45dfa46
PERF: rewrited _concat_date_cols function on C with removing extra co…
anmyachev Mar 17, 2019
1531ec9
perf bench for _concat_date_cols
anmyachev Mar 18, 2019
0756da9
Add benchmark for _does_string_look_like_datetime
vnlitvinov Mar 18, 2019
36b8bdb
implemented _does_string_look_like_datetime in cython
anmyachev Mar 19, 2019
a9afbdb
new benchmark for _concat_date_cols func
anmyachev Mar 19, 2019
ee1f32b
init cython version of _concat_date_cols
anmyachev Mar 20, 2019
84e1b00
fix C version of _concat_date_cols
anmyachev Mar 20, 2019
2cf9f22
added ConcatDateColsList benchmark
anmyachev Mar 20, 2019
28fd5f5
ready cython version, combined concat benchmarks
anmyachev Mar 21, 2019
1f17cf9
added forgotten check for float NaN
anmyachev Mar 21, 2019
d1f8ce5
Cython version of _concat_date_cols works for all cases
vnlitvinov Mar 21, 2019
e44212c
Fix typo in _concat_date_cols
vnlitvinov Mar 21, 2019
6af73bf
used flatiter for numpy array
anmyachev Mar 21, 2019
d4305a9
Fix Cython compilation issues
vnlitvinov Mar 22, 2019
fa3ae05
Remove C version of _concat_date_cols
vnlitvinov Mar 22, 2019
49d66e0
Fix linting errors
vnlitvinov Mar 22, 2019
09e4da6
Try to speed up 1D list
vnlitvinov Mar 22, 2019
67d9509
Hopefully speed up 2D case
vnlitvinov Mar 22, 2019
f05564d
Fix isort, retain some comments
vnlitvinov Mar 22, 2019
b9c96fd
removed unnecessary common_include list with headers; some change cod…
anmyachev Mar 25, 2019
6dc3c51
using util.is_array now; changed double to float64_t; fix docstring
anmyachev Mar 27, 2019
08c7f47
split _concat_date_cols functionality
anmyachev Mar 27, 2019
ba6b86a
added error parameter for xstrtod call
anmyachev Mar 28, 2019
14b9cad
removed Py_SIZE; renamed indexes
anmyachev Mar 28, 2019
4e9211b
Switch to helper method for getting C buffer of string object
vnlitvinov Mar 29, 2019
0aefa7b
changed return type in _concat_date_cols_* functions from void to cnp…
anmyachev Mar 31, 2019
a3a0a77
added doc-string to _concat_date_cols* functions
anmyachev Mar 31, 2019
f1ae23c
added doc-string for convert_and_set_item func; removed isinstance(it…
anmyachev Apr 1, 2019
8797e53
fix docstrings
anmyachev Apr 1, 2019
3bdb452
currently only one conversion function is used - convert_to_unicode
anmyachev Apr 5, 2019
dcbcd9a
added some comments in _concat_date_cols_numpy
anmyachev Apr 5, 2019
1d9c7b7
fix problem from rebase
anmyachev Apr 5, 2019
b4fc887
added some comments in _does_string_look_like_datetime
anmyachev Apr 5, 2019
25ee2d2
changed default value of keep_trivial_numbers to true
anmyachev Apr 5, 2019
2046dcb
Remove not needed try..except in _does_string_look_like_datetime benc…
vnlitvinov Apr 29, 2019
28b6670
upgraded doc-ststring; added some blank lines
anmyachev Apr 29, 2019
30f70ab
removed '_concat_date_cols_sequence' func
anmyachev May 6, 2019
3800c40
now only one function '_concat_date_cols'
anmyachev May 6, 2019
b45df3f
removed 'do_convert' local var from 'convert_to_unicode'
anmyachev May 7, 2019
43dffec
replaced '_concat_date_cols' and 'convert_to_unicode' from lib.pyx to…
anmyachev May 7, 2019
c06a662
added 'test_concat_date_col_fail' test
anmyachev May 7, 2019
5dda33c
added doc-string to '_does_string_look_like_datetime' func; changed '…
anmyachev May 9, 2019
Diff below is for a single commit: 14b9cad42a7bd7f40a3570ab8b5842ac81161c00 ("removed Py_SIZE; renamed indexes"), committed by anmyachev on May 8, 2019.
pandas/_libs/lib.pyx: 93 changes (44 additions, 49 deletions)
@@ -2323,7 +2323,7 @@ cdef inline void convert_and_set_item(object item, Py_ssize_t index,
         float64_t float_item
 
     if keep_trivial_numbers:
-        if isinstance(item, int) and Py_SIZE(item) < 2:
+        if isinstance(item, int):
             if <int>item == 0:
                 do_convert = 0
         elif isinstance(item, float):
@@ -2348,83 +2348,78 @@ cdef inline void put_object_as_unicode(list lst, Py_ssize_t idx,
 
 @cython.wraparound(False)
 @cython.boundscheck(False)
-cdef void concat_date_cols_numpy(tuple date_cols, object[:] result_view,
-                                 Py_ssize_t min_size,
-                                 bint keep_trivial_numbers=False):
+cdef void _concat_date_cols_numpy(tuple date_cols, object[:] result_view,
+                                  Py_ssize_t rows_count, Py_ssize_t col_count,
+                                  bint keep_trivial_numbers):
     cdef:
-        Py_ssize_t i, j, sequence_size = len(date_cols)
+        Py_ssize_t col_idx, row_idx
         list list_to_join
         cnp.ndarray[object] iters
         object[::1] iters_view
         flatiter it
 
-    if sequence_size == 1:
+    if col_count == 1:
         array = date_cols[0]
         it = <flatiter>PyArray_IterNew(array)
-        for i in range(min_size):
+        for row_idx in range(rows_count):
             item = PyArray_GETITEM(array, PyArray_ITER_DATA(it))
-            convert_and_set_item(item, i, result_view, keep_trivial_numbers)
+            convert_and_set_item(item, row_idx, result_view,
+                                 keep_trivial_numbers)
             PyArray_ITER_NEXT(it)
     else:
-        list_to_join = [None] * sequence_size
+        list_to_join = [None] * col_count
         # setup iterators
-        iters = np.zeros(sequence_size, dtype=object)
+        iters = np.zeros(col_count, dtype=object)
         iters_view = iters
-        for i, array in enumerate(date_cols):
-            iters_view[i] = PyArray_IterNew(array)
-        for i in range(min_size):
-            for j, array in enumerate(date_cols):
-                it = <flatiter>iters_view[j]
+        for col_idx, array in enumerate(date_cols):
+            iters_view[col_idx] = PyArray_IterNew(array)
+        for row_idx in range(rows_count):
+            for col_idx, array in enumerate(date_cols):
+                it = <flatiter>iters_view[col_idx]
                 item = PyArray_GETITEM(array, PyArray_ITER_DATA(it))
-                put_object_as_unicode(list_to_join, j, item)
+                put_object_as_unicode(list_to_join, col_idx, item)
                 PyArray_ITER_NEXT(it)
-            result_view[i] = PyUnicode_Join(' ', list_to_join)
+            result_view[row_idx] = PyUnicode_Join(' ', list_to_join)
 
 
 @cython.wraparound(False)
 @cython.boundscheck(False)
-cdef void concat_date_cols_sequence(tuple date_cols, object[:] result_view,
-                                    Py_ssize_t min_size,
-                                    bint keep_trivial_numbers=False):
+cdef void _concat_date_cols_sequence(tuple date_cols, object[:] result_view,
+                                     Py_ssize_t rows_count,
+                                     Py_ssize_t col_count,
+                                     bint keep_trivial_numbers):
     cdef:
-        Py_ssize_t i, j, sequence_size = len(date_cols)
+        Py_ssize_t col_idx, row_idx
         list list_to_join
 
-    if sequence_size == 1:
-        for i, item in enumerate(date_cols[0]):
-            convert_and_set_item(item, i, result_view, keep_trivial_numbers)
+    if col_count == 1:
+        for row_idx, item in enumerate(date_cols[0]):
+            convert_and_set_item(item, row_idx, result_view,
+                                 keep_trivial_numbers)
     else:
-        list_to_join = [None] * sequence_size
-        for i in range(min_size):
-            for j, array in enumerate(date_cols):
-                put_object_as_unicode(list_to_join, j, array[i])
-            result_view[i] = PyUnicode_Join(' ', list_to_join)
+        list_to_join = [None] * col_count
+        for row_idx in range(rows_count):
+            for col_idx, array in enumerate(date_cols):
+                put_object_as_unicode(list_to_join, col_idx, array[row_idx])
+            result_view[row_idx] = PyUnicode_Join(' ', list_to_join)
 
 
-cpdef object _concat_date_cols(tuple date_cols,
-                               bint keep_trivial_numbers=False):
+def _concat_date_cols(tuple date_cols, bint keep_trivial_numbers=False):
     cdef:
-        Py_ssize_t min_size = 0, sequence_size = len(date_cols)
+        Py_ssize_t rows_count = 0, col_count = len(date_cols)
         cnp.ndarray[object] result
-        int all_numpy = 1
 
-    if sequence_size == 0:
+    if col_count == 0:
         return np.zeros(0, dtype=object)
 
-    for i, array in enumerate(date_cols):
-        if not util.is_array(array):
-            all_numpy = 0
-        # find min length for arrays in date_cols
-        # imitation python zip behavior
-        if len(array) < min_size or min_size == 0:
-            min_size = len(array)
-
-    result = np.zeros(min_size, dtype=object)
-    if all_numpy:
-        # call special function to increase performance
-        concat_date_cols_numpy(date_cols, result, min_size,
-                               keep_trivial_numbers)
+    rows_count = min(len(array) for array in date_cols)
+
+    result = np.zeros(rows_count, dtype=object)
+    if all(util.is_array(array) for array in date_cols):
+        # call specialized function to increase performance
+        _concat_date_cols_numpy(date_cols, result, rows_count, col_count,
+                                keep_trivial_numbers)
     else:
-        concat_date_cols_sequence(date_cols, result, min_size,
-                                  keep_trivial_numbers)
+        _concat_date_cols_sequence(date_cols, result, rows_count, col_count,
+                                   keep_trivial_numbers)
     return result
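
For orientation, here is a rough pure-Python model of the behavior the rewritten _concat_date_cols is expected to preserve: take the row_idx-th element of every column, convert it to a string, and join the pieces with single spaces, truncating to the shortest column like zip does. The function name and the plain str() conversion are illustrative assumptions; the Cython code goes through convert_and_set_item/put_object_as_unicode instead, and the keep_trivial_numbers fast path (only partly visible in this hunk) is ignored here.

    import numpy as np

    def concat_date_cols_reference(date_cols):
        """Reference model: space-join the row_idx-th item of every column."""
        if len(date_cols) == 0:
            return np.zeros(0, dtype=object)
        # zip-like behavior: truncate to the shortest column
        rows_count = min(len(col) for col in date_cols)
        result = np.empty(rows_count, dtype=object)
        for row_idx in range(rows_count):
            result[row_idx] = ' '.join(str(col[row_idx]) for col in date_cols)
        return result

    # concat_date_cols_reference((np.array(['2019', '2020']), np.array(['03', '04'])))
    # -> array(['2019 03', '2020 04'], dtype=object)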