Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Error on C Warnings #32163

Merged
merged 13 commits on Mar 19, 2020
4 changes: 2 additions & 2 deletions pandas/_libs/hashtable_class_helper.pxi.in
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ cdef class StringVector:

append_data_string(self.data, x)

cdef extend(self, ndarray[:] x):
cdef extend(self, ndarray[object] x):
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure this is accurate — it might be a string dtype? IIRC this class isn't used.

for i in range(len(x)):
self.append(x[i])

Expand Down Expand Up @@ -237,7 +237,7 @@ cdef class ObjectVector:
self.external_view_exists = True
return self.ao

cdef extend(self, ndarray[:] x):
cdef extend(self, ndarray[object] x):
for i in range(len(x)):
self.append(x[i])

Expand Down
7 changes: 2 additions & 5 deletions pandas/_libs/internals.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -371,8 +371,8 @@ def get_blkno_indexers(int64_t[:] blknos, bint group=True):
Py_ssize_t i, start, stop, n, diff

object blkno
list group_order
dict group_dict
list group_order = []
dict group_dict = {}
int64_t[:] res_view

n = blknos.shape[0]
Expand All @@ -393,9 +393,6 @@ def get_blkno_indexers(int64_t[:] blknos, bint group=True):

yield cur_blkno, slice(start, n)
else:
group_order = []
group_dict = {}

for i in range(1, n):
if blknos[i] != cur_blkno:
if cur_blkno not in group_dict:
Expand Down
3 changes: 1 addition & 2 deletions pandas/_libs/parsers.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -701,7 +701,7 @@ cdef class TextReader:
char *word
object name, old_name
int status
uint64_t hr, data_line
uint64_t hr, data_line = 0
char *errors = "strict"
StringPath path = _string_path(self.c_encoding)

Expand Down Expand Up @@ -805,7 +805,6 @@ cdef class TextReader:
self._tokenize_rows(1)

header = [ self.names ]
data_line = 0

if self.parser.lines < 1:
field_count = len(header[0])
Expand Down
2 changes: 1 addition & 1 deletion pandas/_libs/src/ujson/python/objToJSON.c
Original file line number Diff line number Diff line change
Expand Up @@ -1454,7 +1454,7 @@ char **NpyArr_encodeLabels(PyArrayObject *labels, PyObjectEncoder *enc,
1000000000LL; // nanoseconds per second
} else {
// datetime.* objects don't follow above rules
nanosecVal = PyDateTimeToEpoch(item, NPY_FR_ns);
nanosecVal = PyDateTimeToEpoch((PyDateTime_Date *)item, NPY_FR_ns);
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion pandas/_libs/writers.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def write_csv_rows(list data, ndarray data_index,
"""
# In crude testing, N>100 yields little marginal improvement
cdef:
Py_ssize_t i, j, k = len(data_index), N = 100, ncols = len(cols)
Py_ssize_t i, j = 0, k = len(data_index), N = 100, ncols = len(cols)
list rows

# pre-allocate rows
Expand Down
2 changes: 1 addition & 1 deletion pandas/io/sas/sas.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ cdef const uint8_t[:] rdc_decompress(int result_length, const uint8_t[:] inbuff)

cdef:
uint8_t cmd
uint16_t ctrl_bits, ctrl_mask = 0, ofs, cnt
uint16_t ctrl_bits = 0, ctrl_mask = 0, ofs, cnt
int rpos = 0, k
uint8_t[:] outbuff = np.zeros(result_length, dtype=np.uint8)
Py_ssize_t ipos = 0, length = len(inbuff)
Expand Down
11 changes: 9 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,8 +433,7 @@ def run(self):
extra_compile_args.append("/Z7")
extra_link_args.append("/DEBUG")
else:
# args to ignore warnings
extra_compile_args = []
extra_compile_args = ["-Werror"]
extra_link_args = []
if debugging_symbols_requested:
extra_compile_args.append("-g")
Expand Down Expand Up @@ -477,6 +476,14 @@ def run(self):
# we can't do anything about these warnings because they stem from
# cython+numpy version mismatches.
macros.append(("NPY_NO_DEPRECATED_API", "0"))
if "-Werror" in extra_compile_args:
try:
import numpy as np
except ImportError:
pass
else:
if np.__version__ < LooseVersion("1.16.0"):
extra_compile_args.remove("-Werror")


# ----------------------------------------------------------------------
Expand Down