path: root/re.c
author      shyouhei <shyouhei@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>  2018-11-21 08:51:39 +0000
committer   shyouhei <shyouhei@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>  2018-11-21 08:51:39 +0000
commit      953091a4b1b862340e59ab8a9c9755342f7488c3 (patch)
tree        f119733cddad27f873d41c1ddd11d14f6d45fb62 /re.c
parent      f1ed4b713b2a6adf1cca30eaf0f7874ea4f1577a (diff)
download    ruby-953091a4b1b862340e59ab8a9c9755342f7488c3.tar.gz
char is not unsigned
It seems that decades ago, ruby was written under the assumption that char is unsigned, which is of course false. We need to explicitly store the numeric value in an unsigned char variable to make clear that we expect a value in the range 0..255.

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@65900 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
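For illustration only (this sketch is not part of the commit): whether plain char is signed is implementation-defined, and on common platforms narrowing both a legitimate byte 0xFF and the -1 error marker into a plain char makes the two compare equal, so the old check against (char)-1 could misreport \xFF as an error. The patched code keeps the return value of read_escaped_byte() in an int, checks it against -1, and only then stores the byte into an unsigned char.

#include <stdio.h>
#include <limits.h>

int main(void)
{
    /* Whether plain char is signed is implementation-defined. */
    printf("plain char range: %d..%d\n", CHAR_MIN, CHAR_MAX);

    /* Old pattern: a byte (0..255) or the error marker -1 crammed into a
     * plain char.  On common platforms, narrowing 0xFF to char yields the
     * same value as (char)-1 whether char is signed or unsigned, so the
     * valid byte \xFF is indistinguishable from an error. */
    char c = (char)0xFF;
    if (c == (char)-1)
        printf("0xFF collides with the error marker\n");

    /* New pattern: keep the result in an int so -1 stays unambiguous,
     * then store the confirmed byte into an unsigned char (0..255). */
    int byte = 0xFF;                 /* stands in for read_escaped_byte() */
    if (byte == -1) {
        printf("real error\n");
    }
    else {
        unsigned char uc = (unsigned char)byte;
        printf("byte = 0x%02X\n", uc);
    }
    return 0;
}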
Diffstat (limited to 're.c')
-rw-r--r--    re.c    9
1 file changed, 5 insertions, 4 deletions
diff --git a/re.c b/re.c
index 9223689b42..84027707e0 100644
--- a/re.c
+++ b/re.c
@@ -2539,7 +2539,7 @@ unescape_nonascii(const char *p, const char *end, rb_encoding *enc,
                   VALUE buf, rb_encoding **encp, int *has_property,
                   onig_errmsg_buffer err)
 {
-    char c;
+    unsigned char c;
     char smallbuf[2];
 
     while (p < end) {
@@ -2602,8 +2602,9 @@ unescape_nonascii(const char *p, const char *end, rb_encoding *enc,
             p = p-2;
             if (enc == rb_usascii_encoding()) {
                 const char *pbeg = p;
-                c = read_escaped_byte(&p, end, err);
-                if (c == (char)-1) return -1;
+                int byte = read_escaped_byte(&p, end, err);
+                if (byte == -1) return -1;
+                c = byte;
                 rb_str_buf_cat(buf, pbeg, p-pbeg);
             }
             else {
@@ -2652,7 +2653,7 @@ escape_asis:
             break;
 
           default:
-            rb_str_buf_cat(buf, &c, 1);
+            rb_str_buf_cat(buf, (char *)&c, 1);
             break;
         }
     }
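The cast in the last hunk exists because c is now an unsigned char while rb_str_buf_cat() takes a const char *, and unsigned char * does not implicitly convert to char * in C. A minimal standalone sketch of the same situation, using a hypothetical buf_cat() helper in place of Ruby's API:

#include <stdio.h>
#include <string.h>

/* Stand-in for a rb_str_buf_cat()-style function: appends len bytes
 * from ptr, taking a plain char pointer like the real API does. */
static void buf_cat(char *dst, const char *ptr, size_t len)
{
    size_t used = strlen(dst);
    memcpy(dst + used, ptr, len);
    dst[used + len] = '\0';
}

int main(void)
{
    char buf[8] = "";
    unsigned char c = 0xFF;

    /* &c has type unsigned char *; passing it where char * is expected
     * triggers an incompatible-pointer-type diagnostic, hence the cast.
     * char and unsigned char may alias the same byte, so this is safe. */
    buf_cat(buf, (const char *)&c, 1);

    printf("appended byte: 0x%02X\n", (unsigned char)buf[0]);
    return 0;
}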